text stringlengths 0 1.05M | meta dict |
|---|---|
__author__ = 'chris'
from zope.interface.verify import verifyObject
from txrudp.rudp import ConnectionMultiplexer
from txrudp.connection import HandlerFactory, Handler, State
from txrudp.crypto_connection import CryptoConnectionFactory
from twisted.internet.task import LoopingCall
from twisted.internet import task, reactor
from interfaces import MessageProcessor
from protos.message import Message
from log import Logger
from dht.node import Node
from protos.message import PING, NOT_FOUND
from net.dos import BanScore
class OpenBazaarProtocol(ConnectionMultiplexer):
    """
    A protocol extending the txrudp datagram protocol. This is the main protocol
    which gets passed into the twisted UDPServer. It handles the setup and tear down
    of all connections, parses messages coming off the wire and passes them off to
    the appropriate classes for processing.
    """

    def __init__(self, ip_address, nat_type, testnet=False):
        """
        Initialize the new protocol with the connection handler factory.

        Args:
            ip_address: a `tuple` of the (ip address, port) of this node.
            nat_type: NAT classification string; "Full Cone" allows a slower keep-alive.
            testnet: run against the bitcoin testnet if True.
        """
        self.ip_address = ip_address
        self.testnet = testnet
        # ws/blockchain servers are attached later via set_servers().
        self.ws = None
        self.blockchain = None
        # Shared mutable list of MessageProcessor implementations; the handler
        # factory keeps a reference, so processors registered later are seen
        # by handlers created afterwards.
        self.processors = []
        self.factory = self.ConnHandlerFactory(self.processors, nat_type)
        self.log = Logger(system=self)
        ConnectionMultiplexer.__init__(self, CryptoConnectionFactory(self.factory), self.ip_address[0])

    class ConnHandler(Handler):
        """Per-connection txrudp handler: parses datagrams, dispatches them and keeps the NAT mapping alive."""

        def __init__(self, processors, nat_type, *args, **kwargs):
            super(OpenBazaarProtocol.ConnHandler, self).__init__(*args, **kwargs)
            self.log = Logger(system=self)
            self.processors = processors
            self.connection = None  # assigned by the multiplexer after construction
            self.node = None  # remote peer's Node, learned from the first parsed message
            # Behind a Full Cone NAT the mapping is stable, so ping far less often.
            self.keep_alive_loop = LoopingCall(self.keep_alive)
            self.keep_alive_loop.start(300 if nat_type == "Full Cone" else 30, now=False)
            self.on_connection_made()
            self.addr = None
            self.ban_score = None

        def on_connection_made(self):
            # The multiplexer assigns self.connection only after this handler is
            # constructed, so poll once per second until the handshake resolves.
            if self.connection is None or self.connection.state == State.CONNECTING:
                return task.deferLater(reactor, 1, self.on_connection_made)
            if self.connection.state == State.CONNECTED:
                self.ban_score = BanScore((str(self.connection.dest_addr[0]),
                                           int(self.connection.dest_addr[1])), self.processors[0].multiplexer)
                self.addr = str(self.connection.dest_addr[0]) + ":" + str(self.connection.dest_addr[1])
                self.log.info("connected to %s" % self.addr)

        def receive_message(self, datagram):
            # Anything below 166 bytes cannot be a valid serialized Message
            # (TODO confirm the constant against the proto definition).
            if len(datagram) < 166:
                self.log.warning("received datagram too small from %s, ignoring" % self.addr)
                return False
            m = Message()
            try:
                m.ParseFromString(datagram)
                self.node = Node(m.sender.guid, m.sender.ip, m.sender.port,
                                 m.sender.signedPublicKey, m.sender.vendor)
                # NOT_FOUND responses are offered to every processor.
                for processor in self.processors:
                    if m.command in processor or m.command == NOT_FOUND:
                        processor.receive_message(datagram, self.connection, self.ban_score)
            except Exception:
                # If message isn't formatted properly then ignore
                self.log.warning("received unknown message from %s, ignoring" % self.addr)
                return False

        def handle_shutdown(self):
            # Called by txrudp when the connection is torn down.
            for processor in self.processors:
                processor.timeout((self.connection.dest_addr[0], self.connection.dest_addr[1]), self.node)
            self.connection.unregister()
            if self.addr:
                self.log.info("connection with %s terminated" % self.addr)
            # NOTE(review): ban_score stays None if the connection never reached
            # CONNECTED; this line would then raise AttributeError — confirm.
            self.ban_score.scoring_loop.stop()
            try:
                self.keep_alive_loop.stop()
            except Exception:
                # Loop may never have started or may already be stopped.
                pass

        def keep_alive(self):
            # Periodic ping to hold the NAT mapping open.
            for processor in self.processors:
                if PING in processor and self.node is not None:
                    processor.callPing(self.node)

    class ConnHandlerFactory(HandlerFactory):
        """Creates a ConnHandler for each new incoming rudp connection."""

        def __init__(self, processors, nat_type):
            super(OpenBazaarProtocol.ConnHandlerFactory, self).__init__()
            self.processors = processors
            self.nat_type = nat_type

        def make_new_handler(self, *args, **kwargs):
            return OpenBazaarProtocol.ConnHandler(self.processors, self.nat_type)

    def register_processor(self, processor):
        """Add a new class which implements the `MessageProcessor` interface."""
        if verifyObject(MessageProcessor, processor):
            self.processors.append(processor)

    def unregister_processor(self, processor):
        """Unregister the given processor."""
        if processor in self.processors:
            self.processors.remove(processor)

    def set_servers(self, ws, blockchain):
        # Attach the websocket and blockchain servers after construction.
        self.ws = ws
        self.blockchain = blockchain

    def send_message(self, datagram, address):
        """
        Sends a datagram over the wire to the given address. It will create a new rudp connection if one
        does not already exist for this peer.

        Args:
            datagram: the raw data to send over the wire
            address: a `tuple` of (ip address, port) of the recipient.
        """
        if address not in self:
            con = self.make_new_connection(self.ip_address, address)
        else:
            con = self[address]
        con.send_message(datagram)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "net/wireprotocol.py",
"copies": "1",
"size": "5703",
"license": "mit",
"hash": -9067928369747142000,
"line_mean": 41.2444444444,
"line_max": 110,
"alpha_frac": 0.6189724706,
"autogenerated": false,
"ratio": 4.346798780487805,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5465771251087805,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
from zope.interface.verify import verifyObject
from txrudp.rudp import ConnectionMultiplexer
from txrudp.connection import HandlerFactory, Handler
from txrudp.crypto_connection import CryptoConnectionFactory
from interfaces import MessageProcessor
from protos.message import Message
from log import Logger
class OpenBazaarProtocol(ConnectionMultiplexer):
    """
    The main protocol passed into the twisted UDPServer: sets up and tears
    down rudp connections and dispatches parsed messages to processors.
    """

    def __init__(self, ip_address):
        """
        Initialize the new protocol with the connection handler factory.

        Args:
            ip_address: a `tuple` of the (ip address, port) of this node.
        """
        self.ip_address = ip_address
        # Shared mutable list of MessageProcessor implementations; the factory
        # keeps a reference so later registrations reach new handlers.
        self.processors = []
        self.factory = self.ConnHandlerFactory(self.processors)
        ConnectionMultiplexer.__init__(self, CryptoConnectionFactory(self.factory), self.ip_address[0])

    class ConnHandler(Handler):
        """Per-connection handler which parses and dispatches datagrams."""

        def __init__(self, procssors):  # (sic: parameter name misspelled in original)
            self.log = Logger(system=self)
            self.processors = procssors
            self.connection = None  # assigned by the multiplexer after construction

        def receive_message(self, datagram):
            # Anything below 22 bytes cannot be a valid serialized Message
            # (TODO confirm the constant against the proto definition).
            if len(datagram) < 22:
                self.log.msg("received datagram too small from %s, ignoring" % str(self.connection.dest_addr))
                return False
            m = Message()
            try:
                m.ParseFromString(datagram)
                for processor in self.processors:
                    if m.command in processor:
                        processor.receive_message(datagram, self.connection)
            except:
                # If message isn't formatted properly then ignore
                self.log.msg("Received unknown message from %s, ignoring" % self.connection.dest_addr)
                return False

        def handle_shutdown(self):
            # Called by txrudp when the connection is closed.
            self.log.msg("Connection terminated with (%s, %s)" % (self.connection.dest_addr[0], self.connection.dest_addr[1]))

    class ConnHandlerFactory(HandlerFactory):
        """Creates a ConnHandler for each new incoming connection."""

        def __init__(self, processors):
            self.processors = processors

        def make_new_handler(self, *args, **kwargs):
            return OpenBazaarProtocol.ConnHandler(self.processors)

    def register_processor(self, processor):
        """Add a new class which implements the `MessageProcessor` interface."""
        if verifyObject(MessageProcessor, processor):
            self.processors.append(processor)

    def unregister_processor(self, processor):
        """Unregister the given processor."""
        if processor in self.processors:
            self.processors.remove(processor)

    def send_message(self, datagram, address):
        """
        Sends a datagram over the wire to the given address. It will create a new rudp connection if one
        does not already exist for this peer.

        Args:
            datagram: the raw data to send over the wire
            address: a `tuple` of (ip address, port) of the recipient.
        """
        if address not in self:
            con = self.make_new_connection((self.ip_address[0], self.ip_address[1]), address)
        else:
            con = self[address]
        con.send_message(datagram)
"repo_name": "jorik041/Network",
"path": "wireprotocol.py",
"copies": "1",
"size": "3131",
"license": "mit",
"hash": 8265029074001010000,
"line_mean": 37.1951219512,
"line_max": 126,
"alpha_frac": 0.6288725647,
"autogenerated": false,
"ratio": 4.518037518037518,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5646910082737518,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import argparse
import codecs
import requests
import xml.etree.ElementTree as ET
import csv
from xml.sax.saxutils import escape
# Example of reading a UTF-8 csv file and importing it into LocDirect
# CSV fields should all be within double quotes like:
# "Strings/","str_1","Press Start"
# if content field contains a " it should be outputted as as "" so a field ["how are you?" - he said] should read as ["""how are you?"" - he said"]
#
# Requires LocDirect server version 2.9.130 or later
# Uses the http / https API interface of LocDirect
# Download it http://localizedirect.com/locdirect_downloads/
# LocDirect server support both http and https please use https for secure transfers
#
# Make sure you create a user(developer type should work in most cases) in LocDirect which is a project member of the project you want to connect to
# Command-line interface: every argument is mandatory.
parser = argparse.ArgumentParser(description='locdirect api command')
parser.add_argument('-u', '--user', help='User name', required=True)
parser.add_argument('-pw', '--password', help='Password', required=True)
parser.add_argument('-p', '--project', help='Project name', required=True)
parser.add_argument('-s', '--server', help='Server address', required=True)
parser.add_argument('-i', '--input', help='CSV file name', required=True)
parser.add_argument('-f', '--fields', help='Comma separated fields for each column (stringId,path,text_xxXX)', required=True)
args = vars(parser.parse_args())

# Row/field separators used by LocDirect's compact import wire format.
COMPACT_ROW_SEPARATOR = u'\xaf\xaf'
COMPACT_FIELD_SEPARATOR = u'\xac\xac'
# Be careful not to send any empty lines in the xml message as that will generate an error
def loginMessage(user, password):
    """Return the LocDirect XML login request for the given credentials."""
    template = '''<?xml version="1.0" encoding="UTF-8"?>
<EXECUTION client="API" version="1.0">
<TASK name="Login">
<OBJECT name="Security" />
<WHERE>
<userName>%s</userName>
<password>%s</password>
</WHERE>
</TASK>
</EXECUTION>'''
    return template % (user, password)
def stringImportMessage(secId, projectName, fields, data):
    """Build the LocDirect StringImport XML message.

    Each row of ``data`` is flattened into the compact wire format: every
    column is XML-escaped and terminated by the field separator, every row
    terminated by the row separator.
    """
    pieces = []
    for record in data:
        for cell in record:
            pieces.append(escape(cell.decode('utf-8')))
            pieces.append(COMPACT_FIELD_SEPARATOR)
        pieces.append(COMPACT_ROW_SEPARATOR)
    fieldData = "".join(pieces)
    return '''<?xml version="1.0" encoding="UTF-8"?>
<EXECUTION secId="%s" client="API" version="1.0">
<TASK name="StringImport">
<OBJECT name="String">
<importFields>%s</importFields>
<fieldData>%s</fieldData>
</OBJECT>
<WHERE>
<stringMergeOption>3</stringMergeOption>
<projectName>%s</projectName>
</WHERE>
</TASK>
</EXECUTION>''' % (secId, ';'.join(fields), fieldData, projectName)
# logs in the user and gets the secId
def login(user, password):
    # POST the XML login request and return the session id (secId) from the
    # server's reply; on failure print the server message and exit(1).
    data = loginMessage(user, password)
    r = requests.post(args['server'], data=data.encode('utf-8'), timeout=60)
    tree = ET.ElementTree(ET.fromstring(r.text))
    root = tree.getroot()
    # committed='true' on the root marks a successful login.
    if root.find("./[@committed='true']") is not None:
        return root.find('.//secId').text
    else:
        print root.find(".//MESSAGE").text
        print r.text
        exit(1)
def main(args):
    # Read the CSV, validate column counts against --fields, log in, then
    # push the rows to the LocDirect server as a StringImport message.
    data = []
    fields = args['fields'].split(',')
    # Read UTF8 CSV file and check so it has same amount of columns as fields
    with open(args['input'], 'r') as fin:
        reader = csv.reader(fin)
        for row in reader:
            if len(list(row)) == len(fields):
                data.append(list(row))
            else:
                print "Column mismatch"
                print fields
                print list(row)
                exit(1)
    # Login
    print "Logging in %s" % args['user']
    secId = login(args['user'], args['password'])
    # Send data
    print "Sending data to server"
    message = stringImportMessage(secId, args['project'], fields, data)
    r = requests.post(args['server'], data=message.encode('utf-8'), timeout=60)
    tree = ET.ElementTree(ET.fromstring(r.content))
    root = tree.getroot()
    # committed='true' on the root marks a successful import.
    if root.find("./[@committed='true']") is not None:
        print "Strings imported"
    else:
        print "Error"
        print root.find(".//MESSAGE").text
        print r.text
        exit(1)


if __name__ == '__main__':
    main(args)
"repo_name": "locdirect/http-api-examples",
"path": "ld_http_import.py",
"copies": "1",
"size": "4252",
"license": "mit",
"hash": -1145478440279634800,
"line_mean": 33.8606557377,
"line_max": 148,
"alpha_frac": 0.6333490122,
"autogenerated": false,
"ratio": 3.759504862953139,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4892853875153139,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import argparse
import os
import sys
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from ...backend.utils import add_wooey_script, default_storage, get_storage
from ... import settings as wooey_settings
class Command(BaseCommand):
    """Management command that imports a script (or a folder of scripts) into Wooey."""

    help = 'Adds a script to Wooey'

    def add_arguments(self, parser):
        # Positional: a script file, or a directory containing scripts.
        parser.add_argument('script', type=str, help='A script or folder of scripts to add to Wooey.')
        parser.add_argument(
            '--group',
            dest='group',
            default=wooey_settings.WOOEY_DEFAULT_SCRIPT_GROUP,
            help='The name of the group to create scripts under. Default: Wooey Scripts'
        )
        parser.add_argument(
            '--name',
            dest='name',
            default=None,
            help='The name of the script. Default: None (uses the filename)'
        )
        # Deprecated no-op flag, hidden from --help output.
        parser.add_argument(
            '--update',
            dest='update',
            action='store_true',
            help=argparse.SUPPRESS
        )

    def handle(self, *args, **options):
        script = options.get('script')
        if options.get('update'):
            sys.stdout.write('Explicit script updates are no longer required and this flag is ignored.')
        if not script:
            # Fall back to a bare positional for backwards compatibility.
            if len(args):
                script = args[-1]
            else:
                raise CommandError('You must provide a script path or directory containing scripts.')
        if not os.path.exists(script):
            raise CommandError('{0} does not exist.'.format(script))
        group = options.get('group', 'Wooey Scripts')
        # A directory expands to every file inside it; a file is taken as-is.
        scripts = [os.path.join(script, i) for i in os.listdir(script)] if os.path.isdir(script) else [script]
        converted = 0
        for script in scripts:
            if script.endswith('.pyc') or '__init__' in script:
                continue
            if script.endswith('.py'):
                sys.stdout.write('Converting {}\n'.format(script))
                # copy the script to our storage
                base_name = options.get('name') or os.path.splitext(os.path.split(script)[1])[0]
                with open(script, 'r') as f:
                    # Rebinds `script` to the storage path of the saved copy.
                    script = default_storage.save(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, os.path.split(script)[1]), File(f))
                    if wooey_settings.WOOEY_EPHEMERAL_FILES:
                        # save it locally as well (the default_storage will default to the remote store)
                        # NOTE(review): `f` was already consumed by the save above
                        # and `script` now holds the storage path — confirm this
                        # second save stores the intended content.
                        local_storage = get_storage(local=True)
                        local_storage.save(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, os.path.split(script)[1]), File(f))
                add_kwargs = {
                    'script_path': script,
                    'group': group,
                    'script_name': base_name,
                }
                res = add_wooey_script(**add_kwargs)
                if res['valid']:
                    converted += 1
        sys.stdout.write('Converted {} scripts\n'.format(converted))
| {
"repo_name": "wooey/Wooey",
"path": "wooey/management/commands/addscript.py",
"copies": "1",
"size": "3058",
"license": "bsd-3-clause",
"hash": -5623689245226082000,
"line_mean": 42.0704225352,
"line_max": 131,
"alpha_frac": 0.5631131458,
"autogenerated": false,
"ratio": 4.171896316507503,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5235009462307503,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import bitcoin
import nacl.signing
import nacl.encoding
from db.datastore import KeyStore
from keyutils.guid import GUID
from nacl.public import PrivateKey
class KeyChain(object):
    """Loads the node's key material from the database, generating it on first run."""

    def __init__(self):
        self.db = KeyStore()
        guid_keys = self.db.get_key("guid")
        if guid_keys is None:
            # First run: generate and persist a fresh keychain.
            self.create_keychain()
        else:
            g = GUID.from_privkey(guid_keys[0])
            self.guid = g.guid
            self.guid_privkey = g.privkey
            self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
            self.guid_signed_pubkey = g.signed_pubkey
            # pylint: disable=W0633
            self.bitcoin_master_privkey, self.bitcoin_master_pubkey = self.db.get_key("bitcoin")
            # The curve25519 encryption keypair is derived from the guid key.
            self.encryption_key = PrivateKey(self.guid_privkey)
            self.encryption_pubkey = self.encryption_key.public_key.encode()

    def create_keychain(self):
        """Generate and persist the GUID, BIP32 bitcoin master key and encryption keypair."""
        print "Generating GUID, stand by..."
        g = GUID()
        self.guid = g.guid
        self.guid_privkey = g.privkey
        self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
        self.guid_signed_pubkey = g.signed_pubkey
        self.db.set_key("guid", self.guid_privkey, self.guid_signed_pubkey)
        # The bitcoin master key is derived deterministically from the guid key.
        self.bitcoin_master_privkey = bitcoin.bip32_master_key(bitcoin.sha256(self.guid_privkey))
        self.bitcoin_master_pubkey = bitcoin.bip32_privtopub(self.bitcoin_master_privkey)
        self.db.set_key("bitcoin", self.bitcoin_master_privkey, self.bitcoin_master_pubkey)
        self.encryption_key = PrivateKey(self.guid_privkey)
        self.encryption_pubkey = self.encryption_key.public_key.encode()
| {
"repo_name": "bankonme/OpenBazaar-Server",
"path": "keyutils/keys.py",
"copies": "4",
"size": "1674",
"license": "mit",
"hash": 4772613520031477000,
"line_mean": 39.8292682927,
"line_max": 97,
"alpha_frac": 0.6541218638,
"autogenerated": false,
"ratio": 3.4875,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020527000726423806,
"num_lines": 41
} |
__author__ = 'chris'
import bitcoin
import nacl.signing
import nacl.encoding
from keyutils.guid import GUID
from nacl.public import PrivateKey
class KeyChain(object):
    """Loads the node's key material from the given database, generating it on first run."""

    def __init__(self, database):
        self.db = database.KeyStore()
        guid_keys = self.db.get_key("guid")
        if guid_keys is None:
            # First run: generate and persist a fresh keychain.
            self.create_keychain()
        else:
            g = GUID.from_privkey(guid_keys[0])
            self.guid = g.guid
            self.guid_privkey = g.privkey
            self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
            self.guid_signed_pubkey = g.signed_pubkey
            # pylint: disable=W0633
            self.bitcoin_master_privkey, self.bitcoin_master_pubkey = self.db.get_key("bitcoin")
            # The curve25519 encryption keypair is derived from the guid key.
            self.encryption_key = PrivateKey(self.guid_privkey)
            self.encryption_pubkey = self.encryption_key.public_key.encode()

    def create_keychain(self):
        """Generate and persist the GUID, BIP32 bitcoin master key and encryption keypair."""
        print "Generating GUID, stand by..."
        g = GUID()
        self.guid = g.guid
        self.guid_privkey = g.privkey
        self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
        self.guid_signed_pubkey = g.signed_pubkey
        self.db.set_key("guid", self.guid_privkey, self.guid_signed_pubkey)
        # The bitcoin master key is derived deterministically from the guid key.
        self.bitcoin_master_privkey = bitcoin.bip32_master_key(bitcoin.sha256(self.guid_privkey))
        self.bitcoin_master_pubkey = bitcoin.bip32_privtopub(self.bitcoin_master_privkey)
        self.db.set_key("bitcoin", self.bitcoin_master_privkey, self.bitcoin_master_pubkey)
        self.encryption_key = PrivateKey(self.guid_privkey)
        self.encryption_pubkey = self.encryption_key.public_key.encode()
| {
"repo_name": "JimmyMow/OpenBazaar-Server",
"path": "keyutils/keys.py",
"copies": "3",
"size": "1659",
"license": "mit",
"hash": 782470814152704500,
"line_mean": 40.475,
"line_max": 97,
"alpha_frac": 0.6522001206,
"autogenerated": false,
"ratio": 3.485294117647059,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5637494238247058,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import bitcointools
import nacl.signing
import nacl.encoding
import threading
from keys.guid import GUID
class KeyChain(object):
    """
    Loads the node's key material, generating it on a background thread on
    first run so a UI can poll for completion.
    """

    def __init__(self, database, callback=None, heartbeat_server=None):
        self.db = database
        guid_keys = self.db.keys.get_key("guid")
        if guid_keys is None:
            if heartbeat_server:
                heartbeat_server.set_status("generating GUID")
            # GUID generation is slow; run it off the calling thread.
            threading.Thread(target=self.create_keychain, args=[callback]).start()
        else:
            g = GUID.from_privkey(guid_keys[0])
            self.guid = g.guid
            self.signing_key = g.signing_key
            self.verify_key = g.verify_key
            # pylint: disable=W0633
            self.bitcoin_master_privkey, self.bitcoin_master_pubkey = self.db.keys.get_key("bitcoin")
            # Curve25519 encryption keys are derived from the ed25519 pair.
            self.encryption_key = self.signing_key.to_curve25519_private_key()
            self.encryption_pubkey = self.verify_key.to_curve25519_public_key()
            # NOTE(review): here the callback receives one argument, while
            # create_keychain calls callback(self, True) — confirm intended.
            if callable(callback):
                callback(self)

    def create_keychain(self, callback=None):
        """
        The guid generation can take a while. While it's doing that we will
        open a port to allow a UI to connect and listen for generation to
        complete.
        """
        print "Generating GUID, this may take a few minutes..."
        g = GUID()
        self.guid = g.guid
        self.signing_key = g.signing_key
        self.verify_key = g.verify_key
        self.db.keys.set_key("guid", self.signing_key.encode(encoder=nacl.encoding.HexEncoder),
                             self.verify_key.encode(encoder=nacl.encoding.HexEncoder))
        # The bitcoin master key is derived deterministically from the signing key.
        self.bitcoin_master_privkey = bitcointools.bip32_master_key(bitcointools.sha256(self.signing_key.encode()))
        self.bitcoin_master_pubkey = bitcointools.bip32_privtopub(self.bitcoin_master_privkey)
        self.db.keys.set_key("bitcoin", self.bitcoin_master_privkey, self.bitcoin_master_pubkey)
        self.encryption_key = self.signing_key.to_curve25519_private_key()
        self.encryption_pubkey = self.verify_key.to_curve25519_public_key()
        if callable(callback):
            callback(self, True)
| {
"repo_name": "OpenBazaar/OpenBazaar-Server",
"path": "keys/keychain.py",
"copies": "6",
"size": "2191",
"license": "mit",
"hash": 2547109085127205400,
"line_mean": 41.9607843137,
"line_max": 115,
"alpha_frac": 0.6371519854,
"autogenerated": false,
"ratio": 3.758147512864494,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0014356728955902598,
"num_lines": 51
} |
__author__ = 'chris'
import gnupg
from db.datastore import ProfileStore
from protos import objects
from keyutils.keys import KeyChain
class Profile(object):
    """
    This is a class which handles creating an updating the user profile.
    Data added to a protobuf object and stored in the database. The database
    will update automatically when changes are made to the profile. When we
    need to send it to our peers, we can just call get().
    """

    def __init__(self):
        self.profile = objects.Profile()
        self.db = ProfileStore()
        if self.db.get_proto() is not None:
            self.profile.ParseFromString(self.db.get_proto())

    def create(self, name, location, handle=None):
        """Initialize a new profile with the mandatory fields and persist it."""
        self.profile.name = name
        self.profile.location = location
        if handle is not None:
            self.profile.handle = handle
        self.profile.encryption_key = KeyChain().encryption_pubkey
        self.db.set_proto(self.profile.SerializeToString())

    def get(self, serialized=False):
        """Return the profile proto, serialized to bytes if requested."""
        if serialized:
            return self.profile.SerializeToString()
        return self.profile

    def update(self, user_info):
        """
        To update the profile, create a new protobuf Profile object and add the
        field you want to update.

        Example:
            u = objects.Profile()
            u.about = "hello world"
            update(u)
        """
        self.profile.MergeFrom(user_info)
        self.db.set_proto(self.profile.SerializeToString())

    def add_social_account(self, account_type, username, proof):
        """Add (or replace) a social media account entry and persist the profile."""
        s = self.profile.SocialAccount()
        # Iterate over a copy: removing from the repeated field while
        # iterating it directly skips elements.
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        s.type = s.SocialType.Value(account_type.upper())
        s.username = username
        s.proof_url = proof
        self.profile.social.extend([s])
        self.db.set_proto(self.profile.SerializeToString())

    def remove_social_account(self, account_type):
        """Remove all social account entries of the given type and persist."""
        s = self.profile.SocialAccount()
        # Iterate over a copy (see add_social_account).
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        self.db.set_proto(self.profile.SerializeToString())

    def add_pgp_key(self, public_key, signature, guid):
        """
        Adds a pgp public key to the profile. The user must have submitted a
        valid signature covering the guid otherwise the key will not be added to
        the profile.
        """
        gpg = gnupg.GPG()
        gpg.import_keys(public_key)
        if gpg.verify(signature) and guid in signature:
            p = self.profile.PGPKey()
            p.publicKey = public_key
            p.signature = signature
            self.profile.pgp_key.MergeFrom(p)
            self.db.set_proto(self.profile.SerializeToString())
            return True
        else:
            return False

    def remove_field(self, field):
        """Clear the given profile field; "name" is mandatory and never cleared."""
        # Compare by value: `is not` on a string literal relied on CPython
        # interning and is not a correct equality test.
        if field != "name":
            self.profile.ClearField(field)
            self.db.set_proto(self.profile.SerializeToString())
| {
"repo_name": "bankonme/OpenBazaar-Server",
"path": "market/profile.py",
"copies": "2",
"size": "3207",
"license": "mit",
"hash": 1187532156559505700,
"line_mean": 35.8620689655,
"line_max": 80,
"alpha_frac": 0.6280012473,
"autogenerated": false,
"ratio": 4.1703511053316,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5798352352631599,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import gnupg
from db.datastore import ProfileStore
from protos import objects
class Profile(object):
    """
    This is a class which handles creating an updating the user profile.
    Data added to a protobuf object and stored in the database. The database
    will update automatically when changes are made to the profile. When we
    need to send it to our peers, we can just call get().
    """

    def __init__(self):
        self.profile = objects.Profile()
        self.db = ProfileStore()
        if self.db.get_proto() is not None:
            self.profile.ParseFromString(self.db.get_proto())

    def get(self, serialized=False):
        """Return the profile proto, serialized to bytes if requested."""
        if serialized:
            return self.profile.SerializeToString()
        return self.profile

    def update(self, user_info):
        """
        To update the profile, create a new protobuf Profile object and add the
        field you want to update.

        Example:
            u = objects.Profile()
            u.about = "hello world"
            update(u)
        """
        self.profile.MergeFrom(user_info)
        self.db.set_proto(self.profile.SerializeToString())

    def add_social_account(self, account_type, username, proof):
        """Add (or replace) a social media account entry and persist the profile."""
        s = self.profile.SocialAccount()
        # Iterate over a copy: removing from the repeated field while
        # iterating it directly skips elements.
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        s.type = s.SocialType.Value(account_type.upper())
        s.username = username
        s.proof_url = proof
        self.profile.social.extend([s])
        self.db.set_proto(self.profile.SerializeToString())

    def remove_social_account(self, account_type):
        """Remove all social account entries of the given type and persist."""
        s = self.profile.SocialAccount()
        # Iterate over a copy (see add_social_account).
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        self.db.set_proto(self.profile.SerializeToString())

    def add_pgp_key(self, public_key, signature, guid):
        """
        Adds a pgp public key to the profile. The user must have submitted a
        valid signature covering the guid otherwise the key will not be added to
        the profile.
        """
        gpg = gnupg.GPG()
        gpg.import_keys(public_key)
        if gpg.verify(signature) and guid in signature:
            p = self.profile.PGPKey()
            p.publicKey = public_key
            p.signature = signature
            self.profile.pgp_key.MergeFrom(p)
            self.db.set_proto(self.profile.SerializeToString())
            return True
        else:
            return False

    def remove_field(self, field):
        """Clear the given profile field; "name" is mandatory and never cleared."""
        # Compare by value: `is not` on a string literal relied on CPython
        # interning and is not a correct equality test.
        if field != "name":
            self.profile.ClearField(field)
            self.db.set_proto(self.profile.SerializeToString())
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "market/profile.py",
"copies": "2",
"size": "2847",
"license": "mit",
"hash": 4112070733113801700,
"line_mean": 35.5,
"line_max": 80,
"alpha_frac": 0.6227608008,
"autogenerated": false,
"ratio": 4.156204379562044,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5778965180362043,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import gnupg
from protos import objects
class Profile(object):
    """
    This is a class which handles creating an updating the user profile.
    Data added to a protobuf object and stored in the database. The database
    will update automatically when changes are made to the profile. When we
    need to send it to our peers, we can just call get().
    """

    def __init__(self, db):
        self.profile = objects.Profile()
        self.db = db.ProfileStore()
        if self.db.get_proto() is not None:
            self.profile.ParseFromString(self.db.get_proto())

    def get(self, serialized=False):
        """Return the profile proto, serialized to bytes if requested."""
        if serialized:
            return self.profile.SerializeToString()
        return self.profile

    def update(self, user_info):
        """
        To update the profile, create a new protobuf Profile object and add the
        field you want to update.

        Example:
            u = objects.Profile()
            u.about = "hello world"
            update(u)
        """
        self.profile.MergeFrom(user_info)
        self.db.set_proto(self.profile.SerializeToString())

    def add_social_account(self, account_type, username, proof):
        """Add (or replace) a social media account entry and persist the profile."""
        s = self.profile.SocialAccount()
        # Iterate over a copy: removing from the repeated field while
        # iterating it directly skips elements.
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        s.type = s.SocialType.Value(account_type.upper())
        s.username = username
        s.proof_url = proof
        self.profile.social.extend([s])
        self.db.set_proto(self.profile.SerializeToString())

    def remove_social_account(self, account_type):
        """Remove all social account entries of the given type and persist."""
        s = self.profile.SocialAccount()
        # Iterate over a copy (see add_social_account).
        for social_account in list(self.profile.social):
            if social_account.type == s.SocialType.Value(account_type.upper()):
                self.profile.social.remove(social_account)
        self.db.set_proto(self.profile.SerializeToString())

    def add_pgp_key(self, public_key, signature, guid):
        """
        Adds a pgp public key to the profile. The user must have submitted a
        valid signature covering the guid otherwise the key will not be added to
        the profile.
        """
        gpg = gnupg.GPG()
        gpg.import_keys(public_key)
        if gpg.verify(signature) and guid in signature:
            p = self.profile.PublicKey()
            p.public_key = public_key
            p.signature = signature
            self.profile.pgp_key.MergeFrom(p)
            self.db.set_proto(self.profile.SerializeToString())
            return True
        else:
            return False

    def remove_field(self, field):
        """Clear the given profile field; "name" is mandatory and never cleared."""
        # Compare by value: `is not` on a string literal relied on CPython
        # interning and is not a correct equality test.
        if field != "name":
            self.profile.ClearField(field)
            self.db.set_proto(self.profile.SerializeToString())
| {
"repo_name": "JimmyMow/OpenBazaar-Server",
"path": "market/profile.py",
"copies": "3",
"size": "2820",
"license": "mit",
"hash": -1500793675124971000,
"line_mean": 35.6233766234,
"line_max": 80,
"alpha_frac": 0.6195035461,
"autogenerated": false,
"ratio": 4.147058823529412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6266562369629411,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import gnupg
from protos import objects
class Profile(object):
    """
    This is a class which handles creating an updating the user profile.
    Data added to a protobuf object and stored in the database. The database
    will update automatically when changes are made to the profile. When we
    need to send it to our peers, we can just call get().
    """

    def __init__(self, db):
        self.profile = objects.Profile()
        self.db = db
        if self.db.profile.get_proto() is not None:
            self.profile.ParseFromString(self.db.profile.get_proto())

    def get(self, serialized=False):
        """Return the profile proto, serialized to bytes if requested."""
        if serialized:
            return self.profile.SerializeToString()
        return self.profile

    def update(self, user_info):
        """
        To update the profile, create a new protobuf Profile object and add the
        field you want to update.

        Example:
            u = objects.Profile()
            u.about = "hello world"
            update(u)
        """
        self.profile.MergeFrom(user_info)
        self.db.profile.set_proto(self.profile.SerializeToString())

    def add_social_account(self, account_type, username, proof=None):
        """Add (or replace) a social account entry; unknown types are ignored."""
        s = self.profile.SocialAccount()
        try:
            self._remove_social_if_found(account_type)
            s.type = s.SocialType.Value(account_type.upper())
            s.username = username
            if proof:
                s.proof_url = proof
            self.profile.social.extend([s])
        except ValueError:
            # Unknown account_type: leave the profile untouched.
            return
        self.db.profile.set_proto(self.profile.SerializeToString())

    def remove_social_account(self, account_type):
        """Remove all social account entries of the given type and persist."""
        try:
            self._remove_social_if_found(account_type)
        except ValueError:
            # Unknown account_type: nothing to remove.
            return
        self.db.profile.set_proto(self.profile.SerializeToString())

    def _remove_social_if_found(self, account_type):
        # Raises ValueError for an unknown account_type (handled by callers).
        s = self.profile.SocialAccount()
        st = s.SocialType.Value(account_type.upper())
        # Iterate over a copy: removing from the repeated field while
        # iterating it directly skips elements.
        for social_account in list(self.profile.social):
            if social_account.type == st:
                self.profile.social.remove(social_account)
                self.db.profile.set_proto(self.profile.SerializeToString())

    def add_pgp_key(self, public_key, signature, guid):
        """
        Adds a pgp public key to the profile. The user must have submitted a
        valid signature covering the guid otherwise the key will not be added to
        the profile.
        """
        gpg = gnupg.GPG()
        gpg.import_keys(public_key)
        if gpg.verify(signature) and guid in signature:
            p = self.profile.PublicKey()
            p.public_key = public_key
            p.signature = signature
            self.profile.pgp_key.MergeFrom(p)
            self.db.profile.set_proto(self.profile.SerializeToString())
            return True
        else:
            return False

    def remove_field(self, field):
        """Clear the given profile field; "name" is mandatory and never cleared."""
        # Compare by value: `is not` on a string literal relied on CPython
        # interning and is not a correct equality test.
        if field != "name":
            self.profile.ClearField(field)
            self.db.profile.set_proto(self.profile.SerializeToString())

    def get_temp_handle(self):
        """Return the temporary handle stored in the database."""
        return self.db.profile.get_temp_handle()
| {
"repo_name": "tyler-smith/OpenBazaar-Server",
"path": "market/profile.py",
"copies": "6",
"size": "3165",
"license": "mit",
"hash": -8791032331803363000,
"line_mean": 34.1666666667,
"line_max": 80,
"alpha_frac": 0.6107424961,
"autogenerated": false,
"ratio": 4.153543307086614,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0001371742112482853,
"num_lines": 90
} |
__author__ = 'chris'
import json
import os
from txrestapi.resource import APIResource
from txrestapi.methods import GET, POST, DELETE
from twisted.web import server
from twisted.web.resource import NoResource
from twisted.web import http
from twisted.internet import defer
from binascii import unhexlify
from constants import DATA_FOLDER
from twisted.protocols.basic import FileSender
from protos.countries import CountryCode
from protos import objects
from db.datastore import HashMap, FollowData, ListingsStore
from keyutils.keys import KeyChain
from dht.utils import digest
from market.profile import Profile
from market.contracts import Contract
from collections import OrderedDict
DEFAULT_RECORDS_COUNT = 20
DEFAULT_RECORDS_OFFSET = 0
class OpenBazaarAPI(APIResource):
    """
    This RESTful API allows clients to pull relevant data from the
    OpenBazaar daemon for use in a GUI or other application.
    """

    def __init__(self, mserver, kserver):
        """
        Args:
            mserver: the market server used for node-to-node requests.
            kserver: the kademlia server used to resolve guids on the DHT.
        """
        self.mserver = mserver
        self.kserver = kserver
        APIResource.__init__(self)
@GET('^/api/v1/get_image')
def get_image(self, request):
    """
    Serve an image from the local store/cache, first fetching it from a
    remote node over the DHT when it is not on disk and a `guid` argument
    is supplied.

    Query args:
        hash: hex digest identifying the image file.
        guid (optional): remote node to fetch from on a cache miss.
    """
    @defer.inlineCallbacks
    def _showImage(resp=None):
        @defer.inlineCallbacks
        def _setContentDispositionAndSend(file_path, extension, content_type):
            request.setHeader('content-disposition', 'filename="%s.%s"' % (file_path, extension))
            request.setHeader('content-type', content_type)
            f = open(file_path, "rb")
            yield FileSender().beginFileTransfer(f, request)
            f.close()
            defer.returnValue(0)
        if os.path.exists(image_path):
            # The "%s.%s" format above supplies the dot, so pass a bare
            # "jpg" — the original passed ".jpg", yielding "name..jpg".
            yield _setContentDispositionAndSend(image_path, "jpg", "image/jpeg")
        else:
            request.setResponseCode(http.NOT_FOUND)
            request.write("No such image '%s'" % request.path)
            request.finish()
    if "hash" in request.args:
        if HashMap().get_file(unhexlify(request.args["hash"][0])) is not None:
            image_path = HashMap().get_file(unhexlify(request.args["hash"][0]))
        else:
            image_path = DATA_FOLDER + "cache/" + request.args["hash"][0]
        if not os.path.exists(image_path) and "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_image(node, unhexlify(request.args["hash"][0])).addCallback(_showImage)
                else:
                    _showImage()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            _showImage()
    else:
        request.write(NoResource().render(request))
        request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/get_profile')
def get_profile(self, request):
    """
    Return a profile as JSON — the local profile, or a remote node's when
    a `guid` query argument is supplied.

    Fixes two serialization bugs: the JSON key was misspelled
    "enryption_key", and the pgp key protobuf attribute is `public_key`
    (see Profile.add_pgp_key which sets `public_key`), not `publicKey`.
    """
    def parse_profile(profile):
        if profile is not None:
            profile_json = {
                "profile": {
                    "name": profile.name,
                    "location": str(CountryCode.Name(profile.location)),
                    "encryption_key": profile.encryption_key.encode("hex"),
                    "nsfw": profile.nsfw,
                    "vendor": profile.vendor,
                    "moderator": profile.moderator,
                    "handle": profile.handle,
                    "about": profile.about,
                    "website": profile.website,
                    "email": profile.email,
                    "primary_color": profile.primary_color,
                    "secondary_color": profile.secondary_color,
                    "background_color": profile.background_color,
                    "text_color": profile.text_color,
                    "pgp_key": profile.pgp_key.public_key,
                    "avatar_hash": profile.avatar_hash.encode("hex"),
                    "header_hash": profile.header_hash.encode("hex"),
                    "social_accounts": {}
                }
            }
            if "guid" in request.args:
                profile_json["profile"]["guid"] = request.args["guid"][0]
            else:
                profile_json["profile"]["guid"] = KeyChain().guid.encode("hex")
            for account in profile.social:
                profile_json["profile"]["social_accounts"][str(
                    objects.Profile.SocialAccount.SocialType.Name(account.type)).lower()] = {
                        "username": account.username,
                        "proof_url": account.proof_url
                    }
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(profile_json, indent=4))
            request.finish()
        else:
            request.write(NoResource().render(request))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_profile(node).addCallback(parse_profile)
            else:
                request.write(NoResource().render(request))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        parse_profile(Profile().get())
    return server.NOT_DONE_YET
@GET('^/api/v1/get_listings')
def get_listings(self, request):
    """
    Return listings as JSON — the local store's, or a remote node's when
    a `guid` query argument is supplied.
    """
    def parse_listings(listings):
        # Serialize a Listings protobuf into the JSON response, or render
        # a 404 page when nothing was found.
        if listings is not None:
            response = {"listings": []}
            for l in listings.listing:
                listing_json = {
                    "title": l.title,
                    "contract_hash": l.contract_hash.encode("hex"),
                    "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                    "category": l.category,
                    "price": l.price,
                    "currency_code": l.currency_code,
                    "nsfw": l.nsfw,
                    "origin": str(CountryCode.Name(l.origin)),
                    "ships_to": []
                }
                for country in l.ships_to:
                    listing_json["ships_to"].append(str(CountryCode.Name(country)))
                response["listings"].append(listing_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(NoResource().render(request))
            request.finish()
    if "guid" in request.args:
        # Remote lookup: resolve the guid on the DHT, then fetch the
        # listings from that node.
        def get_node(node):
            if node is not None:
                self.mserver.get_listings(node).addCallback(parse_listings)
            else:
                request.write(NoResource().render(request))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local listings, stored as a serialized Listings protobuf.
        ser = ListingsStore().get_proto()
        if ser is not None:
            l = objects.Listings()
            l.ParseFromString(ser)
            parse_listings(l)
        else:
            parse_listings(None)
    return server.NOT_DONE_YET
@GET('^/api/v1/get_followers')
def get_followers(self, request):
    """
    Return the follower list as JSON — local, or a remote node's when a
    `guid` query argument is supplied.
    """
    def parse_followers(followers):
        if followers is not None:
            response = {"followers": []}
            for f in followers.followers:
                follower_json = {
                    "guid": f.guid.encode("hex"),
                    "handle": f.metadata.handle,
                    "name": f.metadata.name,
                    "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                    "nsfw": f.metadata.nsfw
                }
                response["followers"].append(follower_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(NoResource().render(request))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_followers(node).addCallback(parse_followers)
            else:
                request.write(NoResource().render(request))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local follower list, stored as a serialized Followers protobuf.
        ser = FollowData().get_followers()
        if ser is not None:
            f = objects.Followers()
            f.ParseFromString(ser)
            parse_followers(f)
        else:
            parse_followers(None)
    return server.NOT_DONE_YET
@GET('^/api/v1/get_following')
def get_following(self, request):
    """
    Return the list of followed users as JSON — local, or a remote node's
    when a `guid` query argument is supplied.
    """
    def parse_following(following):
        if following is not None:
            response = {"following": []}
            for f in following.users:
                user_json = {
                    "guid": f.guid.encode("hex"),
                    "handle": f.metadata.handle,
                    "name": f.metadata.name,
                    "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                    "nsfw": f.metadata.nsfw
                }
                response["following"].append(user_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(NoResource().render(request))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_following(node).addCallback(parse_following)
            else:
                request.write(NoResource().render(request))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local following list, stored as a serialized Following protobuf.
        ser = FollowData().get_following()
        if ser is not None:
            f = objects.Following()
            f.ParseFromString(ser)
            parse_following(f)
        else:
            parse_following(None)
    return server.NOT_DONE_YET
@POST('^/api/v1/follow')
def follow(self, request):
    """
    Follow the node identified by the `guid` POST argument.

    The original wrote no response and returned None from the render
    path, leaving the HTTP request hanging; we now report success or
    failure and keep the request open until DHT resolution completes.
    """
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.follow(node)
                request.write(json.dumps({"success": True}))
            else:
                request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
            request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    return server.NOT_DONE_YET
@POST('^/api/v1/unfollow')
def unfollow(self, request):
    """
    Unfollow the node identified by the `guid` POST argument.

    The original wrote no response and returned None from the render
    path, leaving the HTTP request hanging; we now report success or
    failure and keep the request open until DHT resolution completes.
    """
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.unfollow(node)
                request.write(json.dumps({"success": True}))
            else:
                request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
            request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    return server.NOT_DONE_YET
# pylint: disable=R0201
@POST('^/api/v1/update_profile')
def update_profile(self, request):
    """
    Create or update the local profile from POSTed form fields.

    NOTE(review): the guard below only rejects when the profile has no
    encryption key AND neither name nor location was posted — a
    first-time profile posting just one of name/location slips through.
    Confirm intended behavior.
    """
    p = Profile()
    if not p.get().encryption_key \
            and "name" not in request.args \
            and "location" not in request.args:
        # NOTE(review): returns the literal string "False" to the
        # renderer rather than an HTTP error or JSON body.
        return "False"
    u = objects.Profile()
    if "name" in request.args:
        u.name = request.args["name"][0]
    if "location" in request.args:
        # This needs to be formatted. Either here or from the UI.
        u.location = CountryCode.Value(request.args["location"][0].upper())
    if "handle" in request.args:
        u.handle = request.args["handle"][0]
    if "about" in request.args:
        u.about = request.args["about"][0]
    if "nsfw" in request.args:
        # Mere presence of the flag fields counts as true; values ignored.
        u.nsfw = True
    if "vendor" in request.args:
        u.vendor = True
    if "moderator" in request.args:
        u.moderator = True
    if "website" in request.args:
        u.website = request.args["website"][0]
    if "email" in request.args:
        u.email = request.args["email"][0]
    if "avatar" in request.args:
        # Persist the raw upload to disk and index it by content digest.
        with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
            outfile.write(request.args["avatar"][0])
        avatar_hash = digest(request.args["avatar"][0])
        HashMap().insert(avatar_hash, DATA_FOLDER + "store/avatar")
        u.avatar_hash = avatar_hash
    if "header" in request.args:
        with open(DATA_FOLDER + "store/header", 'wb') as outfile:
            outfile.write(request.args["header"][0])
        header_hash = digest(request.args["header"][0])
        HashMap().insert(header_hash, DATA_FOLDER + "store/header")
        u.header_hash = header_hash
    if "pgp_key" in request.args and "signature" in request.args:
        p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                      KeyChain().guid.encode("hex"))
    u.encryption_key = KeyChain().encryption_pubkey
    p.update(u)
@POST('^/api/v1/social_accounts')
def add_social_account(self, request):
    """Attach a social media account (with ownership proof) to the local profile."""
    p = Profile()
    args = request.args
    if all(field in args for field in ("account_type", "username", "proof")):
        p.add_social_account(args["account_type"][0], args["username"][0],
                             args["proof"][0])
@DELETE('^/api/v1/social_accounts')
def delete_social_account(self, request):
    """Remove a social media account of the given type from the local profile."""
    p = Profile()
    account_type = request.args.get("account_type")
    if account_type is not None:
        p.remove_social_account(account_type[0])
@GET('^/api/v1/get_contract')
def get_contract(self, request):
    """
    Return a contract as JSON by its `id` (hex hash). Checks the local
    cache/store first; falls back to fetching from the node given by
    `guid` when present.
    """
    def parse_contract(contract):
        if contract is not None:
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(contract, indent=4))
            request.finish()
        else:
            request.write(NoResource().render(request))
            request.finish()
    if "id" in request.args:
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_contract(node, unhexlify(request.args["id"][0]))\
                        .addCallback(parse_contract)
                else:
                    request.write(NoResource().render(request))
                    request.finish()
            # Broad except: any cache-read failure is treated as a miss
            # and triggers the remote DHT fetch instead.
            try:
                with open(DATA_FOLDER + "cache/" + request.args["id"][0], "r") as filename:
                    contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
            except Exception:
                self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            # No guid supplied: only the local hash-mapped store is tried.
            try:
                with open(HashMap().get_file(unhexlify(request.args["id"][0])), "r") as filename:
                    contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
            except Exception:
                parse_contract(None)
    else:
        request.write(NoResource().render(request))
        request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/set_contract')
def set_contract(self, request):
    """
    Create a new listing contract from the POSTed form fields and publish
    each keyword to the DHT so the listing is discoverable.

    (A leftover debug `print request` was removed, and the `is not ""`
    identity comparisons — implementation-defined for string literals —
    were replaced with `!= ""`.)
    """
    c = Contract()
    c.create(
        str(request.args["expiration_date"][0]),
        request.args["metadata_category"][0],
        request.args["title"][0],
        request.args["description"][0],
        request.args["currency_code"][0],
        request.args["price"][0],
        request.args["process_time"][0],
        True if "nsfw" in request.args else False,
        request.args["shipping_origin"][0],
        request.args["ships_to"],
        est_delivery_domestic=request.args["est_delivery_domestic"][0],
        est_delivery_international=request.args["est_delivery_international"][0],
        shipping_currency_code=request.args["shipping_currency_code"][0],
        shipping_domestic=request.args["shipping_domestic"][0],
        shipping_international=request.args["shipping_international"][0],
        keywords=request.args["keywords"] if "keywords" in request.args else None,
        # Empty optional fields are posted as "" and normalized to None.
        category=request.args["category"][0] if request.args["category"][0] != "" else None,
        condition=request.args["condition"][0] if request.args["condition"][0] != "" else None,
        sku=request.args["sku"][0] if request.args["sku"][0] != "" else None,
        images=request.args["images"],
        free_shipping=True if "free_shipping" in request.args else False)
    for keyword in request.args["keywords"]:
        self.kserver.set(keyword.lower(), c.get_contract_id(), self.kserver.node.getProto().SerializeToString())
@DELETE('^/api/v1/delete_contract')
def delete_contract(self, request):
    """Delete a contract by `id` and unpublish its keywords from the DHT."""
    if "id" not in request.args:
        return
    c = Contract(hash_value=unhexlify(request.args["id"][0]))
    keywords = c.contract["vendor_offer"]["listing"]["item"]["keywords"]
    for keyword in keywords:
        # Each keyword entry is removed with a fresh signature over the
        # contract id (truncated to the 64-byte raw signature).
        self.kserver.delete(keyword.lower(), c.get_contract_id(),
                            KeyChain().signing_key.sign(c.get_contract_id())[:64])
    c.delete()
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "restapi.py",
"copies": "1",
"size": "17809",
"license": "mit",
"hash": -3690080829891478500,
"line_mean": 43.0816831683,
"line_max": 116,
"alpha_frac": 0.5298444607,
"autogenerated": false,
"ratio": 4.340482573726542,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5370327034426542,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import random
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
from dht.utils import digest
class BroadcastClientProtocol(WebSocketClientProtocol):
    """
    Use for testing websocket api
    """

    def sendHello(self):
        # Build one hard-coded v1 API request; the "id" is the hex digest
        # of a random 128-bit value so each run is distinguishable.
        request = {
            "request": {
                "api": "v1",
                "id": digest(random.getrandbits(128)).encode("hex"),
                "command": "get_homepage_listings",
                "keyword": "rustic",
                "message": "Hello World!",
                "subject": "yo!",
                "handle": "@vintage",
                "guid": "5aef2616b37496d65e06f8413724167811756af5",
                "message_type": "CHAT",
                "recipient_key": "769fd0d4f24cdeef820c28dc1df71d3b47ccf2403c8e205dfb89b21fee61c673"
            }
        }
        self.sendMessage(json.dumps(request, indent=4))

    def onOpen(self):
        # Fire the test request as soon as the websocket handshake completes.
        self.sendHello()

    def onMessage(self, payload, isBinary):
        # Dump whatever the server sends back to stdout.
        print payload
# Connect to a locally running OpenBazaar websocket server and let the
# protocol above send its test request once the connection opens.
if __name__ == '__main__':
    factory = WebSocketClientFactory("ws://127.0.0.1:18466")
    factory.protocol = BroadcastClientProtocol
    connectWS(factory)
    reactor.run()
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "wsclient.py",
"copies": "1",
"size": "1286",
"license": "mit",
"hash": -6657294416645705000,
"line_mean": 27.5777777778,
"line_max": 99,
"alpha_frac": 0.5917573872,
"autogenerated": false,
"ratio": 3.7823529411764705,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4874110328376471,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import time
from interfaces import MessageListener, NotificationListener
from zope.interface import implements
from db.datastore import MessageStore, NotificationStore, FollowData
from protos.objects import Plaintext_Message, Following
class MessageListenerImpl(object):
    """Persists inbound chat messages and forwards them to the websocket UI."""
    implements(MessageListener)

    def __init__(self, web_socket_factory):
        self.ws = web_socket_factory
        self.db = MessageStore()

    def notify(self, plaintext, signature):
        """Save the decrypted message, then push it to connected clients."""
        message_type = Plaintext_Message.Type.Name(plaintext.type)
        self.db.save_message(plaintext.sender_guid, plaintext.handle, plaintext.signed_pubkey,
                             plaintext.encryption_pubkey, plaintext.subject,
                             message_type, plaintext.message,
                             plaintext.avatar_hash, plaintext.timestamp, signature, False)
        # TODO: should probably resolve the handle and make sure it matches the guid so the sender can't spoof it
        payload = {
            "sender": plaintext.sender_guid.encode("hex"),
            "subject": plaintext.subject,
            "message_type": message_type,
            "message": plaintext.message,
            "timestamp": plaintext.timestamp,
            "avatar_hash": plaintext.avatar_hash.encode("hex"),
            "encryption_key": plaintext.encryption_pubkey.encode("hex")
        }
        if plaintext.handle:
            payload["handle"] = plaintext.handle
        self.ws.push(json.dumps({"message": payload}, indent=4))
class NotificationListenerImpl(object):
    """Persists notifications and pushes them to the websocket UI."""
    implements(NotificationListener)

    def __init__(self, web_socket_factory):
        self.ws = web_socket_factory

    def notify(self, guid, message):
        """
        Record and broadcast a notification from `guid`.

        The original left `handle`/`avatar_hash` unbound when the guid was
        not in the following list (or the store was empty), raising
        NameError; they now default to empty strings.
        """
        handle = ""
        avatar_hash = ""
        # Pull the metadata for this node from the db, if we follow it.
        f = Following()
        ser = FollowData().get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    avatar_hash = user.metadata.avatar_hash
                    handle = user.metadata.handle
        timestamp = int(time.time())
        NotificationStore().save_notification(guid, handle, message, timestamp, avatar_hash)
        notification_json = {
            "notification": {
                "guid": guid.encode("hex"),
                "message": message,
                "timestamp": timestamp,
                "avatar_hash": avatar_hash.encode("hex")
            }
        }
        if handle:
            notification_json["notification"]["handle"] = handle
        self.ws.push(json.dumps(notification_json, indent=4))
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "market/listeners.py",
"copies": "2",
"size": "2721",
"license": "mit",
"hash": -7435755596714566000,
"line_mean": 39.0147058824,
"line_max": 113,
"alpha_frac": 0.6034546123,
"autogenerated": false,
"ratio": 4.596283783783784,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00154947149292001,
"num_lines": 68
} |
__author__ = 'chris'
import json
import time
from interfaces import MessageListener, NotificationListener
from zope.interface import implements
from protos.objects import Plaintext_Message, Following
class MessageListenerImpl(object):
    """Persists inbound chat messages and forwards them to the websocket UI."""
    implements(MessageListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database.MessageStore()

    def notify(self, plaintext, signature):
        """Save the decrypted message, then push it to connected clients."""
        message_type = Plaintext_Message.Type.Name(plaintext.type)
        self.db.save_message(plaintext.sender_guid, plaintext.handle, plaintext.signed_pubkey,
                             plaintext.encryption_pubkey, plaintext.subject,
                             message_type, plaintext.message,
                             plaintext.avatar_hash, plaintext.timestamp, signature, False)
        # TODO: should probably resolve the handle and make sure it matches the guid so the sender can't spoof it
        payload = {
            "sender": plaintext.sender_guid.encode("hex"),
            "subject": plaintext.subject,
            "message_type": message_type,
            "message": plaintext.message,
            "timestamp": plaintext.timestamp,
            "avatar_hash": plaintext.avatar_hash.encode("hex"),
            "encryption_key": plaintext.encryption_pubkey.encode("hex")
        }
        if plaintext.handle:
            payload["handle"] = plaintext.handle
        self.ws.push(json.dumps({"message": payload}, indent=4))
class NotificationListenerImpl(object):
    """Persists notifications and pushes them to the websocket UI."""
    implements(NotificationListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database

    def notify(self, guid, message):
        """
        Record and broadcast a notification from `guid`.

        The original left `handle`/`avatar_hash` unbound when the guid was
        not in the following list (or the store was empty), raising
        NameError; they now default to empty strings.
        """
        handle = ""
        avatar_hash = ""
        # Pull the metadata for this node from the db, if we follow it.
        f = Following()
        ser = self.db.FollowData().get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    avatar_hash = user.metadata.avatar_hash
                    handle = user.metadata.handle
        timestamp = int(time.time())
        self.db.NotificationStore().save_notification(guid, handle, message, timestamp, avatar_hash)
        notification_json = {
            "notification": {
                "guid": guid.encode("hex"),
                "message": message,
                "timestamp": timestamp,
                "avatar_hash": avatar_hash.encode("hex")
            }
        }
        if handle:
            notification_json["notification"]["handle"] = handle
        self.ws.push(json.dumps(notification_json, indent=4))
| {
"repo_name": "melpomene/OpenBazaar-Server",
"path": "market/listeners.py",
"copies": "3",
"size": "2724",
"license": "mit",
"hash": -1609079314942202400,
"line_mean": 39.0588235294,
"line_max": 113,
"alpha_frac": 0.5991189427,
"autogenerated": false,
"ratio": 4.555183946488294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6654302889188294,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import time
import bitcoin
import nacl.signing
import nacl.encoding
from keyutils.guid import GUID
from nacl.public import PrivateKey
from txrestapi.resource import APIResource
from txrestapi.methods import GET
from twisted.web import server
from twisted.internet.defer import Deferred
from twisted.web.server import Site
from twisted.internet import reactor
class KeyChain(object):
    """
    Loads (or on first run, generates) the node's key material: the guid
    signing key, a bip32 bitcoin master key pair, and a nacl encryption
    key pair derived from the guid private key.
    """

    def __init__(self, database):
        self.db = database.KeyStore()
        guid_keys = self.db.get_key("guid")
        if guid_keys is None:
            # First run: no stored guid key, generate and persist everything.
            self.create_keychain()
        else:
            # get_key returns (privkey, signed_pubkey); rebuild from privkey.
            g = GUID.from_privkey(guid_keys[0])
            self.guid = g.guid
            self.guid_privkey = g.privkey
            self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
            self.guid_signed_pubkey = g.signed_pubkey
            # pylint: disable=W0633
            self.bitcoin_master_privkey, self.bitcoin_master_pubkey = self.db.get_key("bitcoin")
        # Encryption keys are deterministically derived from the guid
        # private key; both branches above set self.guid_privkey first.
        self.encryption_key = PrivateKey(self.guid_privkey)
        self.encryption_pubkey = self.encryption_key.public_key.encode()

    def create_keychain(self):
        """
        The guid generation can take a while. While it's doing that we will
        open a port to allow a UI to connect and listen for generation to
        complete.
        """
        print "Generating GUID, this may take a few minutes..."
        d = Deferred()
        api = GUIDGenerationListener(d)
        site = Site(api, timeout=None)
        # Local-only long-polling endpoint a UI can watch for completion.
        connector = reactor.listenTCP(18470, site, interface="127.0.0.1")
        start = time.time()
        g = GUID()
        # Hand the elapsed time and the listening port to the long-poll
        # handler so it can reply and shut the port down.
        d.callback((round(time.time() - start, 2), connector))
        self.guid = g.guid
        self.guid_privkey = g.privkey
        self.signing_key = nacl.signing.SigningKey(self.guid_privkey)
        self.guid_signed_pubkey = g.signed_pubkey
        self.db.set_key("guid", self.guid_privkey, self.guid_signed_pubkey)
        # The bitcoin master key is seeded from a hash of the guid privkey.
        self.bitcoin_master_privkey = bitcoin.bip32_master_key(bitcoin.sha256(self.guid_privkey))
        self.bitcoin_master_pubkey = bitcoin.bip32_privtopub(self.bitcoin_master_privkey)
        self.db.set_key("bitcoin", self.bitcoin_master_privkey, self.bitcoin_master_pubkey)
        self.encryption_key = PrivateKey(self.guid_privkey)
        self.encryption_pubkey = self.encryption_key.public_key.encode()
class GUIDGenerationListener(APIResource):
    """Long-polling endpoint that reports when guid generation finishes."""

    def __init__(self, deffered):
        # Deferred fired by KeyChain.create_keychain with
        # (elapsed_seconds, listening_port).
        self.defferred = deffered
        APIResource.__init__(self)

    @GET('^/api/v1/guid_generation')
    def guid_generation(self, request):
        """
        A long polling GET which returns when the guid creation is finished.
        """
        def notify(resp):
            request.write(json.dumps({"success": True, "GUID generation time": resp[0]}, indent=4))
            request.finish()
            # stopListening() is the twisted IListeningPort method; the
            # original called stopListenting(), an AttributeError at runtime.
            resp[1].stopListening()
        self.defferred.addCallback(notify)
        return server.NOT_DONE_YET
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "keyutils/keys.py",
"copies": "1",
"size": "2986",
"license": "mit",
"hash": 4596344358661504500,
"line_mean": 36.325,
"line_max": 99,
"alpha_frac": 0.6500334896,
"autogenerated": false,
"ratio": 3.7654476670870114,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4915481156687011,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import time
import os
import pickle
from binascii import unhexlify
from collections import OrderedDict
from txrestapi.resource import APIResource
from txrestapi.methods import GET, POST, DELETE
from twisted.web import server
from twisted.web.resource import NoResource
from twisted.web import http
from twisted.internet import defer, reactor
from twisted.protocols.basic import FileSender
from constants import DATA_FOLDER
from protos.countries import CountryCode
from protos import objects
from keyutils.keys import KeyChain
from dht.utils import digest
from market.profile import Profile
from market.contracts import Contract
from net.upnp import PortMapper
DEFAULT_RECORDS_COUNT = 20
DEFAULT_RECORDS_OFFSET = 0
def str_to_bool(s):
    """
    Convert the strings "true"/"false" (any case) to booleans.

    Raises:
        ValueError: for any other input.
    """
    normalized = s.lower()
    if normalized == 'true':
        return True
    if normalized == 'false':
        return False
    raise ValueError
class OpenBazaarAPI(APIResource):
    """
    This RESTful API allows clients to pull relevant data from the
    OpenBazaar daemon for use in a GUI or other application.
    """

    def __init__(self, mserver, kserver, protocol):
        """
        Args:
            mserver: market server used for node-to-node requests; also
                supplies the shared database handle.
            kserver: kademlia server used to resolve guids on the DHT.
            protocol: the connection multiplexer for this node.
        """
        self.mserver = mserver
        self.kserver = kserver
        self.protocol = protocol
        self.db = mserver.db
        self.keychain = KeyChain(self.db)
        APIResource.__init__(self)
@GET('^/api/v1/get_image')
def get_image(self, request):
    """
    Serve an image from the local store/cache, fetching it from a remote
    node first when it is missing and a `guid` argument is supplied.
    """
    @defer.inlineCallbacks
    def _showImage(resp=None):
        @defer.inlineCallbacks
        def _setContentDispositionAndSend(file_path, extension, content_type):
            request.setHeader('content-disposition', 'filename="%s.%s"' % (file_path, extension))
            request.setHeader('content-type', content_type)
            f = open(file_path, "rb")
            yield FileSender().beginFileTransfer(f, request)
            f.close()
            defer.returnValue(0)
        if os.path.exists(image_path):
            yield _setContentDispositionAndSend(image_path, "jpg", "image/jpeg")
        else:
            request.setResponseCode(http.NOT_FOUND)
            request.write("No such image '%s'" % request.path)
            request.finish()
    # 40 hex chars == a sha1 digest; anything else is rejected outright.
    if "hash" in request.args and len(request.args["hash"][0]) == 40:
        if self.db.HashMap().get_file(request.args["hash"][0]) is not None:
            image_path = self.db.HashMap().get_file(request.args["hash"][0])
        else:
            image_path = DATA_FOLDER + "cache/" + request.args["hash"][0]
        if not os.path.exists(image_path) and "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_image(node, unhexlify(request.args["hash"][0])).addCallback(_showImage)
                else:
                    _showImage()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            _showImage()
    else:
        request.write(NoResource().render(request))
        request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/profile')
def get_profile(self, request):
    """
    Return a profile as JSON — the local one, or a remote node's when a
    `guid` query argument is supplied. Failures return an empty JSON
    object rather than a 404 page.
    """
    def parse_profile(profile):
        if profile is not None:
            mods = []
            for mod in profile.moderator_list:
                mods.append(mod.encode("hex"))
            profile_json = {
                "profile": {
                    "name": profile.name,
                    "location": str(CountryCode.Name(profile.location)),
                    "encryption_key": profile.encryption_key.public_key.encode("hex"),
                    "nsfw": profile.nsfw,
                    "vendor": profile.vendor,
                    "moderator": profile.moderator,
                    "moderator_list": mods,
                    "handle": profile.handle,
                    "about": profile.about,
                    "short_description": profile.short_description,
                    "website": profile.website,
                    "email": profile.email,
                    "primary_color": profile.primary_color,
                    "secondary_color": profile.secondary_color,
                    "background_color": profile.background_color,
                    "text_color": profile.text_color,
                    "pgp_key": profile.pgp_key.public_key,
                    "avatar_hash": profile.avatar_hash.encode("hex"),
                    "header_hash": profile.header_hash.encode("hex"),
                    "social_accounts": {}
                }
            }
            if "guid" in request.args:
                profile_json["profile"]["guid"] = request.args["guid"][0]
            else:
                profile_json["profile"]["guid"] = self.keychain.guid.encode("hex")
            for account in profile.social:
                profile_json["profile"]["social_accounts"][str(
                    objects.Profile.SocialAccount.SocialType.Name(account.type)).lower()] = {
                        "username": account.username,
                        "proof_url": account.proof_url
                    }
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(profile_json, indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_profile(node).addCallback(parse_profile)
            else:
                request.write(json.dumps({}))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        parse_profile(Profile(self.db).get())
    return server.NOT_DONE_YET
@GET('^/api/v1/get_listings')
def get_listings(self, request):
    """
    Return listings as JSON — the local store's, or a remote node's when
    a `guid` query argument is supplied.
    """
    def parse_listings(listings):
        if listings is not None:
            response = {"listings": []}
            for l in listings.listing:
                listing_json = {
                    "title": l.title,
                    "contract_hash": l.contract_hash.encode("hex"),
                    "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                    "category": l.category,
                    "price": l.price,
                    "currency_code": l.currency_code,
                    "nsfw": l.nsfw,
                    "origin": str(CountryCode.Name(l.origin)),
                    "ships_to": []
                }
                for country in l.ships_to:
                    listing_json["ships_to"].append(str(CountryCode.Name(country)))
                response["listings"].append(listing_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_listings(node).addCallback(parse_listings)
            else:
                request.write(json.dumps({}))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local listings, stored as a serialized Listings protobuf.
        ser = self.db.ListingsStore().get_proto()
        if ser is not None:
            l = objects.Listings()
            l.ParseFromString(ser)
            parse_listings(l)
        else:
            parse_listings(None)
    return server.NOT_DONE_YET
@GET('^/api/v1/get_followers')
def get_followers(self, request):
    """
    Return the follower list as JSON — local, or a remote node's when a
    `guid` query argument is supplied.
    """
    def parse_followers(followers):
        if followers is not None:
            response = {"followers": []}
            for f in followers.followers:
                follower_json = {
                    "guid": f.guid.encode("hex"),
                    "handle": f.metadata.handle,
                    "name": f.metadata.name,
                    "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                    "nsfw": f.metadata.nsfw
                }
                response["followers"].append(follower_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_followers(node).addCallback(parse_followers)
            else:
                request.write(json.dumps({}))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local follower list, stored as a serialized Followers protobuf.
        ser = self.db.FollowData().get_followers()
        if ser is not None:
            f = objects.Followers()
            f.ParseFromString(ser)
            parse_followers(f)
        else:
            parse_followers(None)
    return server.NOT_DONE_YET
@GET('^/api/v1/get_following')
def get_following(self, request):
    """
    Return the list of followed users as JSON — local, or a remote
    node's when a `guid` query argument is supplied.
    """
    def parse_following(following):
        if following is not None:
            response = {"following": []}
            for f in following.users:
                user_json = {
                    "guid": f.guid.encode("hex"),
                    "handle": f.metadata.handle,
                    "name": f.metadata.name,
                    "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                    "nsfw": f.metadata.nsfw
                }
                response["following"].append(user_json)
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(response, indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                self.mserver.get_following(node).addCallback(parse_following)
            else:
                request.write(json.dumps({}))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local following list, stored as a serialized Following protobuf.
        ser = self.db.FollowData().get_following()
        if ser is not None:
            f = objects.Following()
            f.ParseFromString(ser)
            parse_following(f)
        else:
            parse_following(None)
    return server.NOT_DONE_YET
@POST('^/api/v1/follow')
def follow(self, request):
    """Follow the node identified by the `guid` POST argument."""
    if "guid" not in request.args:
        return server.NOT_DONE_YET
    def on_resolved(node):
        # Called with the resolved node, or None when the lookup failed.
        if node is None:
            request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
        else:
            self.mserver.follow(node)
            request.write(json.dumps({"success": True}))
        request.finish()
    guid = unhexlify(request.args["guid"][0])
    self.kserver.resolve(guid).addCallback(on_resolved)
    return server.NOT_DONE_YET
@POST('^/api/v1/unfollow')
def unfollow(self, request):
    """Unfollow the node identified by the `guid` POST argument."""
    if "guid" not in request.args:
        return server.NOT_DONE_YET
    def on_resolved(node):
        # Called with the resolved node, or None when the lookup failed.
        if node is None:
            request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
        else:
            self.mserver.unfollow(node)
            request.write(json.dumps({"success": True}))
        request.finish()
    guid = unhexlify(request.args["guid"][0])
    self.kserver.resolve(guid).addCallback(on_resolved)
    return server.NOT_DONE_YET
# pylint: disable=R0201
@POST('^/api/v1/profile')
def update_profile(self, request):
    """
    Create or update the local user profile from the posted form fields.
    A brand-new profile (no encryption key stored yet) must supply at
    least ``name`` and ``location``.
    """
    try:
        p = Profile(self.db)
        # An existing profile (has an encryption key) can be updated field by
        # field; a new one needs name and location.
        can_update_profile = (p.get().encryption_key or
                              ("name" in request.args and
                               "location" in request.args))
        if not can_update_profile:
            request_dict = {
                "success": False,
                "reason": "name or location not included"
            }
            request.write(json.dumps(request_dict, indent=4))
            request.finish()
            return False
        # Build a Profile protobuf containing only the posted fields;
        # presumably p.update() merges it into the stored profile — confirm.
        u = objects.Profile()
        if "name" in request.args:
            u.name = request.args["name"][0]
        if "location" in request.args:
            # This needs to be formatted. Either here or from the UI.
            u.location = CountryCode.Value(request.args["location"][0].upper())
        if "handle" in request.args:
            u.handle = request.args["handle"][0]
        if "about" in request.args:
            u.about = request.args["about"][0]
        if "short_description" in request.args:
            u.short_description = request.args["short_description"][0]
        if "nsfw" in request.args:
            u.nsfw = str_to_bool(request.args["nsfw"][0])
        if "vendor" in request.args:
            u.vendor = str_to_bool(request.args["vendor"][0])
        if "moderator" in request.args:
            u.moderator = str_to_bool(request.args["moderator"][0])
        if "moderator_list" in request.args:
            # NOTE(review): ClearField is called on the freshly fetched copy
            # from p.get() — confirm this actually clears the persisted list.
            p.get().ClearField("moderator_list")
            for moderator in request.args["moderator_list"]:
                u.moderator_list.append(unhexlify(moderator))
        if "website" in request.args:
            u.website = request.args["website"][0]
        if "email" in request.args:
            u.email = request.args["email"][0]
        if "primary_color" in request.args:
            u.primary_color = int(request.args["primary_color"][0])
        if "secondary_color" in request.args:
            u.secondary_color = int(request.args["secondary_color"][0])
        if "background_color" in request.args:
            u.background_color = int(request.args["background_color"][0])
        if "text_color" in request.args:
            u.text_color = int(request.args["text_color"][0])
        if "avatar" in request.args:
            u.avatar_hash = unhexlify(request.args["avatar"][0])
        if "header" in request.args:
            u.header_hash = unhexlify(request.args["header"][0])
        if "pgp_key" in request.args and "signature" in request.args:
            p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                          self.keychain.guid.encode("hex"))
        # Always attach our signed encryption key to the outgoing update.
        enc = u.PublicKey()
        enc.public_key = self.keychain.encryption_pubkey
        enc.signature = self.keychain.signing_key.sign(enc.public_key)[:64]
        u.encryption_key.MergeFrom(enc)
        p.update(u)
        request.write(json.dumps({"success": True}))
        request.finish()
        # Keep the DHT node's vendor flag in sync with the stored profile.
        self.kserver.node.vendor = p.get().vendor
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
@POST('^/api/v1/social_accounts')
def add_social_account(self, request):
try:
p = Profile(self.db)
if "account_type" in request.args and "username" in request.args:
p.add_social_account(request.args["account_type"][0], request.args["username"][0],
request.args["proof"][0] if "proof" in request.args else None)
else:
raise Exception("Missing required fields")
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/social_accounts')
def delete_social_account(self, request):
try:
p = Profile(self.db)
if "account_type" in request.args:
p.remove_social_account(request.args["account_type"][0])
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/contracts')
def get_contract(self, request):
    """
    Fetch a listing contract by its 40-hex-char id. With a ``guid`` arg the
    on-disk cache is tried first, then the contract is fetched from the
    remote node; without one it is read from disk via the HashMap.
    """
    def parse_contract(contract):
        # Write the contract (or {} when unavailable) and close the request.
        if contract is not None:
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(contract, indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "id" in request.args and len(request.args["id"][0]) == 40:
        if "guid" in request.args and len(request.args["guid"][0]) == 40:
            def get_node(node):
                if node is not None:
                    self.mserver.get_contract(node, unhexlify(request.args["id"][0]))\
                        .addCallback(parse_contract)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            # Cache first; any failure (missing file, bad JSON) falls back to
            # a DHT resolve followed by a remote fetch.
            try:
                with open(DATA_FOLDER + "cache/" + request.args["id"][0], "r") as filename:
                    contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
            except Exception:
                self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            # Local-only lookup; any failure is reported as an empty result.
            try:
                with open(self.db.HashMap().get_file(request.args["id"][0]), "r") as filename:
                    contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
            except Exception:
                parse_contract(None)
    else:
        request.write(json.dumps({}))
        request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/contracts')
def set_contract(self, request):
try:
if "options" in request.args:
options = {}
for option in request.args["options"]:
options[option] = request.args[option]
c = Contract(self.db)
c.create(
str(request.args["expiration_date"][0]),
request.args["metadata_category"][0],
request.args["title"][0],
request.args["description"][0],
request.args["currency_code"][0],
request.args["price"][0],
request.args["process_time"][0],
str_to_bool(request.args["nsfw"][0]),
shipping_origin=request.args["shipping_origin"][0] if "shipping_origin" in request.args else None,
shipping_regions=request.args["ships_to"] if "ships_to" in request.args else None,
est_delivery_domestic=request.args["est_delivery_domestic"][0]
if "est_delivery_domestic" in request.args else None,
est_delivery_international=request.args["est_delivery_international"][0]
if "est_delivery_international" in request.args else None,
terms_conditions=request.args["terms_conditions"][0]
if request.args["terms_conditions"][0] is not "" else None,
returns=request.args["returns"][0] if request.args["returns"][0] is not "" else None,
shipping_currency_code=request.args["shipping_currency_code"][0],
shipping_domestic=request.args["shipping_domestic"][0],
shipping_international=request.args["shipping_international"][0],
keywords=request.args["keywords"] if "keywords" in request.args else None,
category=request.args["category"][0] if request.args["category"][0] is not "" else None,
condition=request.args["condition"][0] if request.args["condition"][0] is not "" else None,
sku=request.args["sku"][0] if request.args["sku"][0] is not "" else None,
images=request.args["images"],
free_shipping=str_to_bool(request.args["free_shipping"][0]),
options=options if "options" in request.args else None,
moderators=request.args["moderators"] if "moderators" in request.args else None)
for keyword in request.args["keywords"]:
self.kserver.set(digest(keyword.lower()), c.get_contract_id(),
self.kserver.node.getProto().SerializeToString())
request.write(json.dumps({"success": True, "id": c.get_contract_id().encode("hex")}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/contracts')
def delete_contract(self, request):
    """
    Delete a local listing: unpublish its keywords from the DHT, then remove
    the contract (and optionally its images) from disk.
    """
    try:
        if "id" in request.args:
            file_path = self.db.HashMap().get_file(request.args["id"][0])
            with open(file_path, 'r') as filename:
                contract = json.load(filename, object_pairs_hook=OrderedDict)
            c = Contract(self.db, contract=contract)
            # DHT deletions are signed with our key so peers can verify we
            # own the entry being removed.
            if "keywords" in c.contract["vendor_offer"]["listing"]["item"]:
                for keyword in c.contract["vendor_offer"]["listing"]["item"]["keywords"]:
                    self.kserver.delete(keyword.lower(), c.get_contract_id(),
                                        self.keychain.signing_key.sign(c.get_contract_id())[:64])
            if "delete_images" in request.args:
                c.delete(delete_images=True)
            else:
                c.delete()
        request.write(json.dumps({"success": True}))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/shutdown')
def shutdown(self, request):
    """
    Record the shutdown time in the cache pickle, release our UPnP port
    mappings, and stop the reactor. Never writes an HTTP response — the
    process exits.

    Fix: the pickle file is now opened in binary mode ('rb'/'wb'); pickle
    data is binary and text mode can corrupt it on some platforms.
    """
    try:
        with open(DATA_FOLDER + "cache.pickle", 'rb') as f:
            data = pickle.load(f)
        data["shutdown_time"] = time.time()
        with open(DATA_FOLDER + "cache.pickle", 'wb') as f:
            pickle.dump(data, f)
    except IOError:
        # Best-effort: a missing cache file must not block shutdown.
        pass
    PortMapper().clean_my_mappings(self.kserver.node.port)
    self.protocol.shutdown()
    reactor.stop()
@POST('^/api/v1/make_moderator')
def make_moderator(self, request):
try:
self.mserver.make_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/unmake_moderator')
def unmake_moderator(self, request):
try:
self.mserver.unmake_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/purchase_contract')
def purchase_contract(self, request):
    """
    Buyer side of a purchase: build the order info into the contract, send
    it to the vendor, and on acceptance return the payment address/amount
    the buyer must fund.
    """
    try:
        def handle_response(resp, contract):
            # Fired with the vendor's (possibly empty) response; closes over
            # `payment` and `c`, which are bound below before this can fire.
            if resp:
                contract.await_funding(self.mserver.protocol.get_notification_listener(),
                                       self.protocol.blockchain, resp)
                request.write(json.dumps({"success": True, "payment_address": payment[0],
                                          "amount": payment[1],
                                          "order_id": c.get_contract_id().encode("hex")},
                                         indent=4))
                request.finish()
            else:
                request.write(json.dumps({"success": False, "reason": "seller rejected contract"}, indent=4))
                request.finish()
        options = None
        if "options" in request.args:
            options = {}
            for option in request.args["options"]:
                options[option] = request.args[option][0]
        c = Contract(self.db, hash_value=unhexlify(request.args["id"][0]), testnet=self.protocol.testnet)
        # payment[0]/payment[1] are used above as address and amount.
        payment = c.\
            add_purchase_info(int(request.args["quantity"][0]),
                              request.args["ship_to"][0] if "ship_to" in request.args else None,
                              request.args["address"][0] if "address" in request.args else None,
                              request.args["city"][0] if "city" in request.args else None,
                              request.args["state"][0] if "state" in request.args else None,
                              request.args["postal_code"][0] if "postal_code" in request.args else None,
                              request.args["country"][0] if "country" in request.args else None,
                              request.args["moderator"][0] if "moderator" in request.args else None,
                              options)
        def get_node(node):
            # Fired when the vendor's guid has been resolved on the DHT.
            if node is not None:
                self.mserver.purchase(node, c).addCallback(handle_response, c)
            else:
                request.write(json.dumps({"success": False, "reason": "unable to reach vendor"}, indent=4))
                request.finish()
        seller_guid = unhexlify(c.contract["vendor_offer"]["listing"]["id"]["guid"])
        self.kserver.resolve(seller_guid).addCallback(get_node)
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
@POST('^/api/v1/confirm_order')
def confirm_order(self, request):
    """
    Vendor confirms an in-progress order: appends the order confirmation
    (payout address and optional shipping details) to the contract and
    sends it to the buyer.
    """
    try:
        def respond(success):
            # Fired with the result of delivering the confirmation.
            if success:
                request.write(json.dumps({"success": True}))
                request.finish()
            else:
                request.write(json.dumps({"success": False, "reason": "Failed to send order confirmation"}))
                request.finish()
        file_path = DATA_FOLDER + "store/listings/in progress/" + request.args["id"][0] + ".json"
        with open(file_path, 'r') as filename:
            order = json.load(filename, object_pairs_hook=OrderedDict)
        c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
        c.add_order_confirmation(self.protocol.blockchain,
                                 request.args["payout_address"][0],
                                 comments=request.args["comments"][0] if "comments" in request.args else None,
                                 shipper=request.args["shipper"][0] if "shipper" in request.args else None,
                                 tracking_number=request.args["tracking_number"][0]
                                 if "tracking_number" in request.args else None,
                                 est_delivery=request.args["est_delivery"][0]
                                 if "est_delivery" in request.args else None,
                                 url=request.args["url"][0] if "url" in request.args else None,
                                 password=request.args["password"][0] if "password" in request.args else None)
        guid = c.contract["buyer_order"]["order"]["id"]["guid"]
        self.mserver.confirm_order(guid, c).addCallback(respond)
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
@POST('^/api/v1/upload_image')
def upload_image(self, request):
try:
ret = []
if "image" in request.args:
for image in request.args["image"]:
img = image.decode('base64')
hash_value = digest(img).encode("hex")
with open(DATA_FOLDER + "store/media/" + hash_value, 'wb') as outfile:
outfile.write(img)
self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/media/" + hash_value)
ret.append(hash_value)
elif "avatar" in request.args:
avi = request.args["avatar"][0].decode("base64")
hash_value = digest(avi).encode("hex")
with open(DATA_FOLDER + "store/avatar", 'wb') as outfile:
outfile.write(avi)
self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/avatar")
ret.append(hash_value)
elif "header" in request.args:
hdr = request.args["header"][0].decode("base64")
hash_value = digest(hdr).encode("hex")
with open(DATA_FOLDER + "store/header", 'wb') as outfile:
outfile.write(hdr)
self.db.HashMap().insert(hash_value, DATA_FOLDER + "store/header")
ret.append(hash_value)
request.write(json.dumps({"success": True, "image_hashes": ret}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/complete_order')
def complete_order(self, request):
def respond(success):
if success:
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": "Failed to send receipt to vendor"}))
request.finish()
file_path = DATA_FOLDER + "purchases/in progress/" + request.args["id"][0] + ".json"
with open(file_path, 'r') as filename:
order = json.load(filename, object_pairs_hook=OrderedDict)
c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
c.add_receipt(True,
self.protocol.blockchain,
feedback=request.args["feedback"][0] if "feedback" in request.args else None,
quality=request.args["quality"][0] if "quality" in request.args else None,
description=request.args["description"][0] if "description" in request.args else None,
delivery_time=request.args["delivery_time"][0]
if "delivery_time" in request.args else None,
customer_service=request.args["customer_service"][0]
if "customer_service" in request.args else None,
review=request.args["review"][0] if "review" in request.args else "")
guid = c.contract["vendor_offer"]["listing"]["id"]["guid"]
self.mserver.complete_order(guid, c).addCallback(respond)
return server.NOT_DONE_YET
@POST('^/api/v1/settings')
def set_settings(self, request):
    """
    Persist the posted user settings. NOTE: the arguments to
    Settings.update() are positional — their order must match the schema
    read back by get_settings (indices 1..13).
    """
    try:
        settings = self.db.Settings()
        settings.update(
            request.args["refund_address"][0],
            request.args["currency_code"][0],
            request.args["country"][0],
            request.args["language"][0],
            request.args["time_zone"][0],
            # booleans are stored as 0/1 integer flags
            1 if str_to_bool(request.args["notifications"][0]) else 0,
            # list-valued fields are stored as JSON strings
            json.dumps(request.args["shipping_addresses"] if request.args["shipping_addresses"] != "" else []),
            json.dumps(request.args["blocked"] if request.args["blocked"] != "" else []),
            request.args["libbitcoin_server"][0],
            1 if str_to_bool(request.args["ssl"][0]) else 0,
            KeyChain(self.db).guid_privkey.encode("hex"),
            request.args["terms_conditions"][0],
            request.args["refund_policy"][0]
        )
        request.write(json.dumps({"success": True}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/settings')
def get_settings(self, request):
    """Return the stored user settings as JSON ({} when none exist)."""
    row = self.db.Settings().get()
    if row is None:
        payload = {}
    else:
        # Positional row layout matches what set_settings stored.
        payload = {
            "refund_address": row[1],
            "currency_code": row[2],
            "country": row[3],
            "language": row[4],
            "time_zone": row[5],
            "notifications": row[6] == 1,
            "shipping_addresses": json.loads(row[7]),
            "blocked_guids": json.loads(row[8]),
            "libbitcoin_server": row[9],
            "ssl": row[10] == 1,
            "seed": row[11],
            "terms_conditions": row[12],
            "refund_policy": row[13]
        }
    request.write(json.dumps(payload, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/connected_peers')
def get_connected_peers(self, request):
    """Return the addresses of all currently connected peers as JSON."""
    peer_list = self.protocol.keys()
    request.write(json.dumps(peer_list, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/routing_table')
def get_routing_table(self, request):
    """Dump every node in the Kademlia routing table as a JSON list."""
    peers = [
        {
            "guid": peer.id.encode("hex"),
            "ip": peer.ip,
            "port": peer.port,
            "vendor": peer.vendor
        }
        for bucket in self.kserver.protocol.router.buckets
        for peer in bucket.nodes.values()
    ]
    request.write(json.dumps(peers, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/get_notifications')
def get_notifications(self, request):
    """Return the most recent notifications (all of them unless ``limit`` is given)."""
    notifications = self.db.NotificationStore().get_notifications()
    count = int(request.args["limit"][0]) if "limit" in request.args else len(notifications)
    # Take the last `count` rows, preserving stored order.
    payload = [
        {
            "id": row[0],
            "guid": row[1],
            "handle": row[2],
            "type": row[3],
            "order_id": row[4],
            "title": row[5],
            "timestamp": row[6],
            "image_hash": row[7].encode("hex"),
            "read": row[8] != 0
        }
        for row in notifications[len(notifications) - count:]
    ]
    request.write(json.dumps(payload, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/mark_notification_as_read')
def mark_notification_as_read(self, request):
try:
for notif_id in request.args["id"]:
self.db.NotificationStore().mark_as_read(notif_id)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/broadcast')
def broadcast(self, request):
try:
def get_response(num):
request.write(json.dumps({"success": True, "peers reached": num}, indent=4))
request.finish()
self.mserver.broadcast(request.args["message"][0]).addCallback(get_response)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_messages')
def get_chat_messages(self, request):
    """Return a page of chat history with the given peer, newest first."""
    rows = self.db.MessageStore().get_messages(request.args["guid"][0], "CHAT")
    page_size = int(request.args["limit"][0]) if "limit" in request.args else len(rows)
    offset = int(request.args["start"][0]) if "start" in request.args else 0
    # Reverse so newest messages come first, then slice the requested page.
    payload = [
        {
            "guid": row[0],
            "handle": row[1],
            "message": row[6],
            "timestamp": row[7],
            "avatar_hash": row[8].encode("hex"),
            "outgoing": row[10] != 0,
            "read": row[11] != 0
        }
        for row in rows[::-1][offset: offset + page_size]
    ]
    request.write(json.dumps(payload, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_conversations')
def get_chat_conversations(self, request):
    """Return the list of chat conversations as JSON."""
    conversations = self.db.MessageStore().get_conversations()
    request.write(json.dumps(conversations, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/mark_chat_message_as_read')
def mark_chat_message_as_read(self, request):
try:
self.db.MessageStore().mark_as_read(request.args["guid"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_sales')
def get_sales(self, request):
    """Return every sale record from the database as a JSON list."""
    payload = [
        {
            "order_id": row[0],
            "title": row[1],
            "description": row[2],
            "timestamp": row[3],
            "btc_total": row[4],
            "status": row[5],
            "thumbnail_hash": row[6],
            "buyer": row[7],
            "contract_type": row[8]
        }
        for row in self.db.Sales().get_all()
    ]
    request.write(json.dumps(payload, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/get_purchases')
def get_purchases(self, request):
    """Return every purchase record from the database as a JSON list."""
    payload = [
        {
            "order_id": row[0],
            "title": row[1],
            "description": row[2],
            "timestamp": row[3],
            "btc_total": row[4],
            "status": row[5],
            "thumbnail_hash": row[6],
            "vendor": row[7],
            "contract_type": row[8]
        }
        for row in self.db.Purchases().get_all()
    ]
    request.write(json.dumps(payload, indent=4))
    request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/check_for_payment')
def check_for_payment(self, request):
    """
    Re-scan the blockchain and mempool for payments to an unfunded order's
    address and replay any found transactions into the contract. Responds
    immediately; the scan continues asynchronously.
    """
    if not self.protocol.blockchain.connected:
        request.write(json.dumps({"success": False, "reason": "libbitcoin server offline"}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    try:
        file_path = DATA_FOLDER + "purchases/unfunded/" + request.args["order_id"][0] + ".json"
        with open(file_path, 'r') as filename:
            order = json.load(filename, object_pairs_hook=OrderedDict)
        c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
        c.blockchain = self.protocol.blockchain
        c.notification_listener = self.mserver.protocol.get_notification_listener()
        c.is_purchase = True
        addr = c.contract["buyer_order"]["order"]["payment"]["address"]
        def history_fetched(ec, history):
            # Fired with the address history; ec is the libbitcoin error code.
            if not ec:
                # pylint: disable=W0612
                # pylint: disable=W0640
                for objid, txhash, index, height, value in history:
                    # Try the mempool first; fall back to the confirmed chain.
                    def cb_txpool(ec, result):
                        if ec:
                            self.protocol.blockchain.fetch_transaction(txhash, cb_chain)
                        else:
                            c.on_tx_received(None, None, None, None, result)
                    def cb_chain(ec, result):
                        if not ec:
                            c.on_tx_received(None, None, None, None, result)
                    self.protocol.blockchain.fetch_txpool_transaction(txhash, cb_txpool)
        self.protocol.blockchain.fetch_history2(addr, history_fetched)
        request.write(json.dumps({"success": True}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    except Exception, e:
        request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "api/restapi.py",
"copies": "1",
"size": "43099",
"license": "mit",
"hash": 6029408886130872000,
"line_mean": 44.947761194,
"line_max": 115,
"alpha_frac": 0.5220306736,
"autogenerated": false,
"ratio": 4.266382894476341,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5288413568076341,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import time
import random
from interfaces import MessageListener, BroadcastListener, NotificationListener
from zope.interface import implements
from protos.objects import Plaintext_Message, Following
from dht.utils import digest
class MessageListenerImpl(object):
    """
    Persists inbound plaintext messages and forwards them to the websocket
    UI as JSON.
    """
    implements(MessageListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database.MessageStore()

    def notify(self, plaintext, signature):
        """Store the message and push it to the UI."""
        sender_hex = plaintext.sender_guid.encode("hex")
        type_name = Plaintext_Message.Type.Name(plaintext.type)
        self.db.save_message(sender_hex, plaintext.handle, plaintext.signed_pubkey,
                             plaintext.encryption_pubkey, plaintext.subject,
                             type_name, plaintext.message,
                             plaintext.timestamp, plaintext.avatar_hash, signature, False)
        # TODO: should probably resolve the handle and make sure it matches the guid so the sender can't spoof it
        inner = {
            "sender": sender_hex,
            "subject": plaintext.subject,
            "message_type": type_name,
            "message": plaintext.message,
            "timestamp": plaintext.timestamp,
            "avatar_hash": plaintext.avatar_hash.encode("hex"),
            "encryption_key": plaintext.encryption_pubkey.encode("hex")
        }
        if plaintext.handle:
            inner["handle"] = plaintext.handle
        self.ws.push(json.dumps({"message": inner}, indent=4))
class BroadcastListenerImpl(object):
    """
    Persists broadcasts from followed nodes and pushes them to the
    websocket UI.

    Fix: ``handle`` and ``avatar_hash`` were only assigned inside the
    following-list match loop, so a broadcast from a sender not found in
    the list (or with no following data stored) raised UnboundLocalError;
    they now default to empty values.
    """
    implements(BroadcastListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database

    def notify(self, guid, message):
        """Store the broadcast and push it to the UI."""
        # pull the metadata for this node from the db; default to empty
        # values when the sender isn't in our following list.
        handle = ""
        avatar_hash = ""
        f = Following()
        ser = self.db.FollowData().get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    avatar_hash = user.metadata.avatar_hash
                    handle = user.metadata.handle
        timestamp = int(time.time())
        # Random id so repeated identical broadcasts are stored separately.
        broadcast_id = digest(random.getrandbits(255)).encode("hex")
        self.db.BroadcastStore().save_broadcast(broadcast_id, guid.encode("hex"), handle, message,
                                                timestamp, avatar_hash)
        broadcast_json = {
            "broadcast": {
                "id": broadcast_id,
                "guid": guid.encode("hex"),
                "handle": handle,
                "message": message,
                "timestamp": timestamp,
                "avatar_hash": avatar_hash.encode("hex")
            }
        }
        self.ws.push(json.dumps(broadcast_json, indent=4))
class NotificationListenerImpl(object):
    """
    Persists notifications and pushes them to the websocket UI.
    """
    implements(NotificationListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database

    def notify(self, guid, handle, notif_type, order_id, title, image_hash):
        """Store the notification and push it to the UI."""
        created_at = int(time.time())
        notif_id = digest(random.getrandbits(255)).encode("hex")
        self.db.NotificationStore().save_notification(notif_id, guid.encode("hex"), handle,
                                                      notif_type, order_id, title,
                                                      created_at, image_hash)
        payload = {
            "notification": {
                "id": notif_id,
                "guid": guid.encode("hex"),
                "handle": handle,
                "type": notif_type,
                "order_id": order_id,
                "title": title,
                "timestamp": created_at,
                "image_hash": image_hash.encode("hex")
            }
        }
        self.ws.push(json.dumps(payload, indent=4))
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "market/listeners.py",
"copies": "1",
"size": "3945",
"license": "mit",
"hash": 3976415085491807000,
"line_mean": 38.0594059406,
"line_max": 113,
"alpha_frac": 0.573130545,
"autogenerated": false,
"ratio": 4.452595936794582,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0006976796311143992,
"num_lines": 101
} |
__author__ = 'chris'
import miniupnpc
class PortMapper(object):
    """
    UPnP Port Mapping tool, so we don't need to manually forward ports on a
    router.
    Ideally we'd use a random port within a range of about 1000 possible ports.
    Ideally we'd delete the mapping when we shutdown OpenBazaar but that might
    not be the case.
    Support port mapping for TCP and UDP ports.
    Created on Aug 14, 2014
    @author: gubatron
    """
    DEBUG = False  # boolean: when True, debug() prints to stdout
    upnp = None  # miniupnpc.UPnP
    OPEN_BAZAAR_DESCRIPTION = 'OpenBazaar Server'
    # set True in __init__ once an Internet Gateway Device has been selected
    upnp_device_available = False

    @staticmethod
    def debug(*s):
        # Print only when the class-level DEBUG flag is on.
        if PortMapper.DEBUG:
            print str(s)

    def debug_upnp_values(self):
        # Dump the current miniupnpc configuration values.
        self.debug('discoverdelay', self.upnp.discoverdelay)
        self.debug('lanaddr', self.upnp.lanaddr)
        self.debug('multicastif', self.upnp.multicastif)
        self.debug('minissdpdsocket', self.upnp.minissdpdsocket)

    def debug_addresses(self):
        # Best-effort: externalipaddress() can fail when no IGD responded.
        try:
            self.debug('local ip address :', self.upnp.lanaddr)
            self.debug('external ip address :', self.upnp.externalipaddress())
        except Exception:
            pass

    def __init__(self):
        self.upnp = miniupnpc.UPnP()
        self.debug('inital(default) values :')
        self.debug_upnp_values()
        self.upnp.discoverdelay = 200  # ms to wait for SSDP discovery replies
        self.debug('Discovering... delay=%ums' % self.upnp.discoverdelay)
        self.debug(self.upnp.discover(), 'device(s) detected')
        try:
            self.upnp.selectigd()
            self.upnp_device_available = True
        except Exception as exc:
            # No gateway device found: every later call becomes a no-op.
            print 'Exception :', exc
            self.upnp_device_available = False
            return
        # display information about the IGD and the internet connection
        self.debug_addresses()
        self.debug("Status Info:", self.get_status_info())
        self.debug("Connection Type:", self.get_connection_type())
        self.debug_upnp_values()

    def get_status_info(self):
        # Returns 'n/a' when the IGD query fails.
        result = 'n/a'
        try:
            result = self.upnp.statusinfo()
        except Exception:
            pass
        return result

    def get_connection_type(self):
        # Returns 'n/a' when the IGD query fails.
        result = 'n/a'
        try:
            result = self.upnp.connectiontype()
        except Exception:
            pass
        return result

    def add_port_mapping(self, external_port, internal_port,
                         protocol='TCP', ip_to_bind=None):
        """
        Valid protocol values are: 'TCP', 'UDP'
        Usually you'll pass external_port and internal_port as the same number.
        Returns the miniupnpc result, or False on failure / no IGD.
        """
        result = False
        if self.upnp_device_available:
            if protocol not in ('TCP', 'UDP'):
                raise Exception(
                    'PortMapper.add_port_mapping() invalid protocol ' +
                    'exception \'%s\'' %
                    str(protocol)
                )
            if ip_to_bind is None:
                # Default to our own LAN address.
                ip_to_bind = self.upnp.lanaddr
                self.debug(
                    "INFO: add_port_mapping() -> No alternate ip_to_bind " +
                    "address passed, using default lan address (",
                    self.upnp.lanaddr, ")"
                )
            try:
                result = self.upnp.addportmapping(
                    external_port,
                    protocol,
                    ip_to_bind,
                    internal_port,
                    PortMapper.OPEN_BAZAAR_DESCRIPTION + ' (' + protocol + ')',
                    ''
                )
            except Exception:
                # ConflictInMappingEntry
                result = False
            self.debug("add_port_mapping(%s)?:" % str(external_port), result)
        return result

    def delete_port_mapping(self, port, protocol='TCP'):
        # Remove a single mapping; returns the miniupnpc result or False.
        result = False
        if self.upnp_device_available:
            try:
                result = self.upnp.deleteportmapping(port, protocol)
                self.debug(
                    "PortMapper.delete_port_mapping(%d, %s):" % (
                        port, protocol
                    )
                )
                self.debug(result)
            except Exception:
                self.debug(
                    "Could not delete mapping on port %d protocol %s" % (
                        port, protocol
                    )
                )
        return result

    def get_mapping_list(self):
        """Return [PortMappingEntry]."""
        mappings = []
        if self.upnp_device_available:
            i = 0
            # getgenericportmapping returns None once past the last entry.
            while True:
                port_mapping = self.upnp.getgenericportmapping(i)
                if port_mapping is None:
                    break
                port, proto, (ihost, iport), desc, cxx, dxx, exx = port_mapping
                mapping = PortMappingEntry(port, proto, ihost, iport, desc, exx)
                self.debug(
                    "port:", port,
                    desc, ihost,
                    "iport:", iport,
                    "c", cxx,
                    "d", dxx,
                    "e", exx
                )
                i += 1
                mappings.append(mapping)
        return mappings

    def clean_my_mappings(self, port):
        """Delete previous OpenBazaar UPnP Port mappings if found."""
        if self.upnp_device_available:
            mappings = self.get_mapping_list()
            for mapping in mappings:
                # Only touch mappings we created (matched by description)
                # on the requested port.
                if mapping.description.startswith(PortMapper.OPEN_BAZAAR_DESCRIPTION) \
                        and mapping.port == port:
                    self.debug('delete_port_mapping -> Found:', str(mapping))
                    try:
                        self.delete_port_mapping(mapping.port, mapping.protocol)
                    except Exception:
                        pass
class PortMappingEntry(object):
    """
    POPO to represent a port mapping entry;
    tuples are evil when used for abstractions.
    """

    def __init__(self, port, protocol, internal_host, internal_port,
                 description, expiration):
        self.port = port
        self.protocol = protocol
        self.internal_host = internal_host
        self.internal_port = internal_port
        self.description = description
        self.expiration = expiration

    def __str__(self):
        # Human-readable one-line summary of the mapping.
        return ('{ protocol:%s, description: %s, port: %s, '
                'internal_port: %s, internal_host: %s, expiration: %s}' % (
                    self.protocol, self.description, self.port,
                    self.internal_port, self.internal_host, self.expiration))
def main():
    # Manual smoke test: exercises add / list / clean against a real UPnP
    # gateway on the LAN. Requires an IGD to be reachable.
    # Test code
    PortMapper.DEBUG = True
    mapper = PortMapper()
    mapper.add_port_mapping(12345, 12345, 'TCP')
    mapper.add_port_mapping(12345, 12345, 'UDP')
    mappings = mapper.get_mapping_list()
    print len(mappings), "mappings"
    mapper.clean_my_mappings(12345)
    print "---- after deleting the mapping"
    mappings = mapper.get_mapping_list()
    print len(mappings), "mappings"
    print mapper.debug_upnp_values()
if __name__ == '__main__':
    main()
| {
"repo_name": "saltduck/OpenBazaar-Server",
"path": "net/upnp.py",
"copies": "7",
"size": "7193",
"license": "mit",
"hash": -5233532053144581000,
"line_mean": 31.8447488584,
"line_max": 87,
"alpha_frac": 0.5249548172,
"autogenerated": false,
"ratio": 4.317527010804322,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8342481828004321,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import mock
import time
import nacl.signing, nacl.encoding, nacl.hash
from binascii import unhexlify
from txrudp import connection, rudp, packet, constants
from twisted.trial import unittest
from twisted.internet import task, reactor, address, udp
from dht.protocol import KademliaProtocol
from dht.utils import digest
from dht.storage import ForgetfulStorage
from dht.node import Node
from protos import message
class KademliaProtocolTest(unittest.TestCase):
    """
    Exercises KademliaProtocol's RPC handling (PING/STORE/DELETE/STUN/
    FIND_NODE/FIND_VALUE, plus the call* client helpers) over a mocked
    txrudp connection, driving txrudp's reactor with a deterministic
    task.Clock so packet timeouts can be advanced manually.
    """

    def setUp(self):
        # Fixed fake addresses for this node and two remote peers.
        self.public_ip = '123.45.67.89'
        self.port = 12345
        self.own_addr = (self.public_ip, self.port)
        self.addr1 = ('132.54.76.98', 54321)
        self.addr2 = ('231.76.45.89', 15243)
        # Replace the rudp reactor's scheduling with a manual clock so tests
        # control time deterministically.
        self.clock = task.Clock()
        connection.REACTOR.callLater = self.clock.callLater
        self.proto_mock = mock.Mock(spec_set=rudp.ConnectionMultiplexer)
        self.handler_mock = mock.Mock(spec_set=connection.Handler)
        self.con = connection.Connection(
            self.proto_mock,
            self.handler_mock,
            self.own_addr,
            self.addr1
        )
        # Deterministic identity: a fixed signing key yields a stable GUID.
        valid_key = "1a5c8e67edb8d279d1ae32fa2da97e236b95e95c837dc8c3c7c2ff7a7cc29855"
        self.signing_key = nacl.signing.SigningKey(valid_key, encoder=nacl.encoding.HexEncoder)
        verify_key = self.signing_key.verify_key
        signed_pubkey = self.signing_key.sign(str(verify_key))
        h = nacl.hash.sha512(signed_pubkey)
        self.storage = ForgetfulStorage()
        self.node = Node(unhexlify(h[:40]), self.public_ip, self.port, signed_pubkey, True)
        self.protocol = KademliaProtocol(self.node, self.storage, 20)
        self.handler = self.protocol.RPCHandler(False, 5, self.protocol._outstanding, self.protocol)
        self.handler.connection = self.con
        # Stub the UDP transport so makeConnection() can query the host address.
        transport = mock.Mock(spec_set=udp.Port)
        ret_val = address.IPv4Address('UDP', self.public_ip, self.port)
        transport.attach_mock(mock.Mock(return_value=ret_val), 'getHost')
        self.protocol.makeConnection(transport)

    def tearDown(self):
        # Shut down the connection and protocol so no delayed calls leak
        # between tests.
        self.con.shutdown()
        self.protocol.shutdown()

    def test_invalid_datagram(self):
        # Data that is not a valid serialized Message protobuf is rejected.
        self.assertFalse(self.handler.receive_message("hi"))
        self.assertFalse(self.handler.receive_message("hihihihihihihihihihihihihihihihihihihihih"))

    def test_rpc_ping(self):
        self._connecting_to_connected()
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("PING")
        data = m.SerializeToString()
        # The expected PING response echoes the request with our serialized
        # node proto appended as the single argument.
        m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
        expected_message = m.SerializeToString()
        self.handler.receive_message(data)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        received_message = sent_packet.payload
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 2)

    def test_rpc_store(self):
        self._connecting_to_connected()
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("STORE")
        m.arguments.extend(["Keyword", "Key", self.protocol.sourceNode.getProto().SerializeToString()])
        data = m.SerializeToString()
        # Expected response: same header with the arguments replaced by "True".
        for i in range(0, 3):
            del m.arguments[-1]
        m.arguments.append("True")
        expected_message = m.SerializeToString()
        self.handler.receive_message(data)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        received_message = sent_packet.payload
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 2)
        # The stored value must be retrievable from local storage.
        self.assertTrue(self.storage.getSpecific("Keyword", "Key") == self.protocol.sourceNode.getProto().SerializeToString())

    def test_rpc_delete(self):
        self._connecting_to_connected()
        # Set a keyword to store
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("STORE")
        m.arguments.extend(["Keyword", "Key", self.protocol.sourceNode.getProto().SerializeToString()])
        data = m.SerializeToString()
        for i in range(0, 3):
            del m.arguments[-1]
        m.arguments.append("True")
        expected_message1 = m.SerializeToString()
        self.handler.receive_message(data)
        self.assertTrue(self.storage.getSpecific("Keyword", "Key") == self.protocol.sourceNode.getProto().SerializeToString())
        # Test bad signature
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("DELETE")
        m.arguments.extend(["Keyword", "Key", "Bad Signature"])
        data = m.SerializeToString()
        for i in range(0, 3):
            del m.arguments[-1]
        m.arguments.append("False")
        expected_message2 = m.SerializeToString()
        self.handler.receive_message(data)
        # Value must survive the rejected delete.
        self.assertTrue(self.storage.getSpecific("Keyword", "Key") == self.protocol.sourceNode.getProto().SerializeToString())
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packets = tuple(
            packet.Packet.from_bytes(call[0][0])
            for call in self.proto_mock.send_datagram.call_args_list
        )
        self.assertEqual(sent_packets[0].payload, expected_message1)
        self.assertEqual(sent_packets[1].payload, expected_message2)
        # Reset the mock's call history before the good-signature round.
        self.proto_mock.send_datagram.call_args_list = []
        # Test good signature
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("DELETE")
        m.arguments.extend(["Keyword", "Key", self.signing_key.sign("Key")[:64]])
        data = m.SerializeToString()
        for i in range(0, 3):
            del m.arguments[-1]
        m.arguments.append("True")
        expected_message3 = m.SerializeToString()
        self.handler.receive_message(data)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        self.assertEqual(sent_packet.payload, expected_message3)
        # A correctly signed delete removes the value.
        self.assertTrue(self.storage.getSpecific("Keyword", "Key") is None)

    def test_rpc_stun(self):
        self._connecting_to_connected()
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("STUN")
        data = m.SerializeToString()
        # STUN responds with the sender's observed address and port.
        m.arguments.extend([self.addr1[0], str(self.addr1[1])])
        expected_message = m.SerializeToString()
        self.handler.receive_message(data)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        received_message = sent_packet.payload
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 2)

    def test_rpc_find_node(self):
        self._connecting_to_connected()
        node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
        node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
        node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
        self.protocol.router.addContact(node1)
        self.protocol.router.addContact(node2)
        self.protocol.router.addContact(node3)
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("FIND_NODE")
        m.arguments.append(digest("nodetofind"))
        data = m.SerializeToString()
        # The expected response carries the three known contacts, serialized.
        del m.arguments[-1]
        m.arguments.extend([node3.getProto().SerializeToString(), node2.getProto().SerializeToString(), node1.getProto().SerializeToString()])
        expected_message = m.SerializeToString()
        self.handler.receive_message(data)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        received_message = sent_packet.payload
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 2)

    def test_rpc_find_value(self):
        self._connecting_to_connected()
        # Set a value to find
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("STORE")
        m.arguments.extend(["Keyword", "Key", self.protocol.sourceNode.getProto().SerializeToString()])
        data = m.SerializeToString()
        self.handler.receive_message(data)
        self.assertTrue(self.storage.getSpecific("Keyword", "Key") == self.protocol.sourceNode.getProto().SerializeToString())
        # Send the find_value rpc
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("FIND_VALUE")
        m.arguments.append("Keyword")
        data = m.SerializeToString()
        self.handler.receive_message(data)
        # Build the expected response: the marker "value" plus the stored pair.
        del m.arguments[-1]
        value = message.Value()
        value.valueKey = "Key"
        value.serializedData = self.protocol.sourceNode.getProto().SerializeToString()
        m.arguments.append("value")
        m.arguments.append(value.SerializeToString())
        expected_message = m.SerializeToString()
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packets = tuple(
            packet.Packet.from_bytes(call[0][0])
            for call in self.proto_mock.send_datagram.call_args_list
        )
        # The FIND_VALUE response is the second datagram (after the STORE reply).
        received_message = sent_packets[1].payload
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 3)

    def test_rpc_find_without_value(self):
        # With nothing stored under the keyword, FIND_VALUE falls back to
        # returning contacts (FIND_NODE-style behaviour).
        self._connecting_to_connected()
        node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
        node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
        node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
        self.protocol.router.addContact(node1)
        self.protocol.router.addContact(node2)
        self.protocol.router.addContact(node3)
        m = message.Message()
        m.messageID = digest("msgid")
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("FIND_VALUE")
        m.arguments.append(digest("Keyword"))
        data = m.SerializeToString()
        self.handler.receive_message(data)
        del m.arguments[-1]
        m.arguments.extend([node2.getProto().SerializeToString(), node3.getProto().SerializeToString(), node1.getProto().SerializeToString()])
        expected_message = m.SerializeToString()
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        m_calls = self.proto_mock.send_datagram.call_args_list
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        received_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(received_message)
        self.assertEqual(received_message, expected_message)
        self.assertEqual(len(m_calls), 2)

    def test_callPing(self):
        self._connecting_to_connected()
        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        self.protocol.callPing(n)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        # messageID is a 20-byte digest.
        self.assertTrue(len(m.messageID) == 20)
        self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
        self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
        self.assertTrue(m.command == message.PING)
        # The datagram must be addressed to the remote peer.
        self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)

    def test_callStore(self):
        self._connecting_to_connected()
        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        self.protocol.callStore(n, digest("Keyword"), digest("Key"), self.protocol.sourceNode.getProto().SerializeToString())
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        self.assertTrue(len(m.messageID) == 20)
        self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
        self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
        self.assertTrue(m.command == message.STORE)
        self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
        # STORE arguments: keyword digest, key digest, serialized value.
        self.assertEqual(m.arguments[0], digest("Keyword"))
        self.assertEqual(m.arguments[1], digest("Key"))
        self.assertEqual(m.arguments[2], self.protocol.sourceNode.getProto().SerializeToString())

    def test_callFindValue(self):
        self._connecting_to_connected()
        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        keyword = Node(digest("Keyword"))
        self.protocol.callFindValue(n, keyword)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        self.assertTrue(len(m.messageID) == 20)
        self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
        self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
        self.assertTrue(m.command == message.FIND_VALUE)
        self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
        self.assertEqual(m.arguments[0], keyword.id)

    def test_callFindNode(self):
        self._connecting_to_connected()
        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        keyword = Node(digest("nodetofind"))
        self.protocol.callFindNode(n, keyword)
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        self.assertTrue(len(m.messageID) == 20)
        self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
        self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
        self.assertTrue(m.command == message.FIND_NODE)
        self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
        self.assertEqual(m.arguments[0], keyword.id)

    def test_callDelete(self):
        self._connecting_to_connected()
        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        self.protocol.callDelete(n, digest("Keyword"), digest("Key"), digest("Signature"))
        self.clock.advance(100 * constants.PACKET_TIMEOUT)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
        self.assertTrue(len(m.messageID) == 20)
        self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
        self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
        self.assertTrue(m.command == message.DELETE)
        # DELETE arguments: keyword digest, key digest, signature.
        self.assertEqual(m.arguments[0], digest("Keyword"))
        self.assertEqual(m.arguments[1], digest("Key"))
        self.assertEqual(m.arguments[2], digest("Signature"))

    def test_acceptResponse(self):
        self._connecting_to_connected()

        def handle_response(resp):
            # resp[0] is the success flag, resp[1] the response arguments.
            self.assertTrue(resp[0])
            self.assertEqual(resp[1][0], self.protocol.sourceNode.id)

        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        self.protocol[self.addr1] = self.con
        d = self.protocol.callPing(n)
        self.clock.advance(1)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        m = message.Message()
        m.ParseFromString(sent_message)
        # Register the outgoing RPC as outstanding, then feed the ping back
        # as its own response to trigger the deferred's callback.
        timeout = reactor.callLater(5, self.protocol._timeout, m.messageID)
        self.handler._outstanding[m.messageID] = (d, timeout)
        m.arguments.append(self.protocol.sourceNode.id)
        self.handler.receive_message(m.SerializeToString())
        return d.addCallback(handle_response)

    def test_unknownRPC(self):
        # An unrecognized command name must be rejected by _acceptRequest.
        self.assertFalse(self.handler._acceptRequest(digest("msgid"), "unknown", [digest("argument")], Node(digest("nodeid"))))

    def test_timeout(self):
        self._connecting_to_connected()
        self.protocol[self.addr1] = self.con

        def test_remove_outstanding():
            # Once timed out, the pending RPC must be dropped from the table.
            self.assertTrue(len(self.protocol._outstanding) == 0)

        def test_deffered(d):
            # The deferred fires with a failure flag after the timeout.
            self.assertFalse(d[0])
            test_remove_outstanding()

        n = Node(digest("S"), self.addr1[0], self.addr1[1])
        d = self.protocol.callPing(n)
        # Advance past the 5-second RPC timeout used by the handler.
        self.clock.advance(6)
        return d.addCallback(test_deffered)

    def test_transferKeyValues(self):
        self._connecting_to_connected()
        self.protocol[self.addr1] = self.con
        self.protocol.storage[digest("keyword")] = (digest("key"), self.protocol.sourceNode.getProto().SerializeToString())
        self.protocol.transferKeyValues(Node(digest("id"), self.addr1[0], self.addr1[1]))
        self.clock.advance(1)
        connection.REACTOR.runUntilCurrent()
        sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
        sent_message = sent_packet.payload
        x = message.Message()
        x.ParseFromString(sent_message)
        # Build the STORE message we expect the transfer to have sent.
        m = message.Message()
        m.sender.MergeFrom(self.protocol.sourceNode.getProto())
        m.command = message.Command.Value("STORE")
        m.arguments.append(digest("keyword"))
        m.arguments.append(digest("key"))
        m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
        self.assertEqual(x.sender, m.sender)
        self.assertEqual(x.command, m.command)
        self.assertEqual(x.arguments[0], m.arguments[0])
        self.assertEqual(x.arguments[1], m.arguments[1])
        self.assertEqual(x.arguments[2], m.arguments[2])

    def test_refreshIDs(self):
        node1 = Node(digest("id1"), "127.0.0.1", 12345, signed_pubkey=digest("key1"))
        node2 = Node(digest("id2"), "127.0.0.1", 22222, signed_pubkey=digest("key2"))
        node3 = Node(digest("id3"), "127.0.0.1", 77777, signed_pubkey=digest("key3"))
        self.protocol.router.addContact(node1)
        self.protocol.router.addContact(node2)
        self.protocol.router.addContact(node3)
        # Age every bucket past the refresh threshold.
        for b in self.protocol.router.buckets:
            b.lastUpdated = (time.time() - 5000)
        ids = self.protocol.getRefreshIDs()
        self.assertTrue(len(ids) == 1)
def _connecting_to_connected(self):
remote_synack_packet = packet.Packet.from_data(
42,
self.con.own_addr,
self.con.dest_addr,
ack=0,
syn=True
)
self.con.receive_packet(remote_synack_packet)
self.clock.advance(0)
connection.REACTOR.runUntilCurrent()
self.next_remote_seqnum = 43
m_calls = self.proto_mock.send_datagram.call_args_list
sent_syn_packet = packet.Packet.from_bytes(m_calls[0][0][0])
seqnum = sent_syn_packet.sequence_number
self.handler_mock.reset_mock()
self.proto_mock.reset_mock()
self.next_seqnum = seqnum + 1 | {
"repo_name": "jorik041/Network",
"path": "dht/tests/test_protocol.py",
"copies": "1",
"size": "21942",
"license": "mit",
"hash": -8378470247242293000,
"line_mean": 42.0254901961,
"line_max": 142,
"alpha_frac": 0.6540880503,
"autogenerated": false,
"ratio": 3.6400132714001328,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47941013217001327,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import nacl.signing
import nacl.hash
import guidc
from binascii import hexlify, unhexlify
class GUID:
    """
    Node identity generator.

    Produces a signing key, a signed public key and a 20-byte GUID (the first
    40 hex chars of the sha512 of the signed pubkey).  Key generation repeats
    until a small proof-of-work condition on the hash tail is met, either via
    the C helper library (guidc) or in pure Python.
    """

    # Proof-of-work target: the first 6 hex chars of the hash tail,
    # interpreted as an integer, must be below this value.
    POW_TARGET = 50

    def __init__(self, use_C_lib=False):
        """
        Args:
            use_C_lib: if True, delegate the proof-of-work search to the
                guidc C extension and only derive the key material here.
        """
        if use_C_lib:
            self.privkey = unhexlify(guidc.generate())
            self.signing_key = nacl.signing.SigningKey(self.privkey)
            verify_key = self.signing_key.verify_key
            signed = self.signing_key.sign(str(verify_key))
            h = nacl.hash.sha512(signed)
            self.signed_pubkey = signed
            self.guid = unhexlify(h[:40])
        else:
            self.privkey = self.generate()

    def generate(self):
        """
        Search for a keypair whose hash satisfies the proof-of-work target,
        set signing_key / guid / signed_pubkey, and return the encoded seed.
        """
        valid_pow = False
        while not valid_pow:
            signing_key = nacl.signing.SigningKey.generate()
            verify_key = signing_key.verify_key
            signed = signing_key.sign(str(verify_key))
            h = nacl.hash.sha512(signed)
            # 'pow_hash' (was 'pow') renamed to avoid shadowing the builtin.
            pow_hash = h[64:128]
            valid_pow = int(pow_hash[:6], 16) < self.POW_TARGET
        self.signing_key = signing_key
        self.guid = unhexlify(h[:40])
        self.signed_pubkey = signed
        return signing_key.encode()

    def __str__(self):
        return "privkey: %s\nsigned pubkey: %s\nguid: %s" % (hexlify(self.privkey), hexlify(self.signed_pubkey), hexlify(self.guid))
| {
"repo_name": "jorik041/Network",
"path": "guidutils/guid.py",
"copies": "1",
"size": "1348",
"license": "mit",
"hash": 3212262410237796000,
"line_mean": 33.5641025641,
"line_max": 132,
"alpha_frac": 0.5853115727,
"autogenerated": false,
"ratio": 3.5104166666666665,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45957282393666665,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import os
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
def get(key, default):
    """Return the Django setting named *key*, or *default* when it is unset."""
    try:
        return getattr(settings, key)
    except AttributeError:
        return default
# Storage directories (relative to the storage root) for user files and scripts.
WOOEY_FILE_DIR = get('WOOEY_FILE_DIR', 'wooey_files')
WOOEY_SCRIPT_DIR = get('WOOEY_SCRIPT_DIR', 'wooey_scripts')
# Celery integration: whether to run jobs through Celery, and the task module.
WOOEY_CELERY = get('WOOEY_CELERY', True)
WOOEY_CELERY_TASKS = get('WOOEY_CELERY_TASKS', 'wooey.tasks')
# Authentication / registration behaviour.
WOOEY_ALLOW_ANONYMOUS = get('WOOEY_ALLOW_ANONYMOUS', True)
WOOEY_AUTH = get('WOOEY_AUTH', True)
WOOEY_ALLOW_REGISTRATION = get('WOOEY_ALLOW_REGISTRATION', True)
WOOEY_LOGIN_URL = get('WOOEY_LOGIN_URL', settings.LOGIN_URL)
WOOEY_REGISTER_URL = get('WOOEY_REGISTER_URL', '/accounts/register/')
WOOEY_SHOW_LOCKED_SCRIPTS = get('WOOEY_SHOW_LOCKED_SCRIPTS', True)
# Whether stored files are transient (e.g. on ephemeral filesystems like Heroku).
WOOEY_EPHEMERAL_FILES = get('WOOEY_EPHEMERAL_FILES', False)
WOOEY_DEFAULT_SCRIPT_GROUP = get('WOOEY_DEFAULT_SCRIPT_GROUP', _('Scripts'))
# Site branding (lazily translated strings).
WOOEY_SITE_NAME = get('WOOEY_SITE_NAME', _('Wooey!'))
WOOEY_SITE_TAG = get('WOOEY_SITE_TAG', _('A web UI for Python scripts'))
| {
"repo_name": "hottwaj/Wooey",
"path": "wooey/settings.py",
"copies": "1",
"size": "1030",
"license": "bsd-3-clause",
"hash": 8060846727601915000,
"line_mean": 41.9166666667,
"line_max": 76,
"alpha_frac": 0.7252427184,
"autogenerated": false,
"ratio": 2.581453634085213,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38066963524852127,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import os
import sys
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.conf import settings
from ...backend.utils import add_wooey_script, get_storage, default_storage
from ... import settings as wooey_settings
class Command(BaseCommand):
    """Management command: add a script, or a directory of scripts, to Wooey."""
    help = 'Adds a script to Wooey'

    def add_arguments(self, parser):
        parser.add_argument('script', type=str, help='A script or folder of scripts to add to Wooey.')
        parser.add_argument('--group',
                            dest='group',
                            default='Wooey Scripts',
                            help='The name of the group to create scripts under. Default: Wooey Scripts')

    def handle(self, *args, **options):
        """Copy each .py script into storage and register it via add_wooey_script."""
        script = options.get('script')
        if not script:
            # Older Django versions deliver positionals through *args.
            if len(args):
                script = args[0]
            else:
                raise CommandError('You must provide a script path or directory containing scripts.')
        if not os.path.exists(script):
            raise CommandError('{0} does not exist.'.format(script))
        group = options.get('group', 'Wooey Scripts')
        # A directory expands to its contained files; a single file is kept as-is.
        scripts = [os.path.join(script, i) for i in os.listdir(script)] if os.path.isdir(script) else [script]
        converted = 0
        for script in scripts:
            if script.endswith('.pyc') or '__init__' in script:
                continue
            if script.endswith('.py'):
                sys.stdout.write('Converting {}\n'.format(script))
                # copy the script to our storage
                with open(script, 'r') as f:
                    script = default_storage.save(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, os.path.split(script)[1]), File(f))
                    if wooey_settings.WOOEY_EPHEMERAL_FILES:
                        # save it locally as well (the default_storage will default to the remote store)
                        local_storage = get_storage(local=True)
                        # BUGFIX: default_storage.save() consumed the file handle
                        # above; without rewinding, the local copy is written empty.
                        f.seek(0)
                        local_storage.save(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, os.path.split(script)[1]), File(f))
                res = add_wooey_script(script_path=script, group=group)
                if res['valid']:
                    converted += 1
        sys.stdout.write('Converted {} scripts\n'.format(converted))
| {
"repo_name": "waytai/Wooey",
"path": "wooey/management/commands/addscript.py",
"copies": "3",
"size": "2288",
"license": "bsd-3-clause",
"hash": 8519321934231547000,
"line_mean": 45.693877551,
"line_max": 131,
"alpha_frac": 0.5992132867,
"autogenerated": false,
"ratio": 4.028169014084507,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002884327005144264,
"num_lines": 49
} |
__author__ = 'chris'
import os
import sys
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.core.files.storage import default_storage
from django.conf import settings
from ...backend.utils import add_wooey_script
from ... import settings as wooey_settings
class Command(BaseCommand):
    """Management command: add a script, or a directory of scripts, to Wooey."""
    help = 'Adds a script to Wooey'

    def add_arguments(self, parser):
        parser.add_argument('script', type=str, help='A script or folder of scripts to add to Wooey.')
        parser.add_argument('--group',
                            dest='group',
                            default='Wooey Scripts',
                            help='The name of the group to create scripts under. Default: Wooey Scripts')

    def handle(self, *args, **options):
        # Resolve the script path: prefer the named option, fall back to the
        # first positional argument (older Django invocation style).
        script = options.get('script')
        if not script:
            if len(args):
                script = args[0]
            else:
                raise CommandError('You must provide a script path or directory containing scripts.')
        if not os.path.exists(script):
            raise CommandError('{0} does not exist.'.format(script))
        group = options.get('group', 'Wooey Scripts')
        # A directory expands to its contained files; a single file is kept as-is.
        scripts = [os.path.join(script, i) for i in os.listdir(script)] if os.path.isdir(script) else [script]
        converted = 0
        for script in scripts:
            if script.endswith('.pyc') or '__init__' in script:
                continue
            if script.endswith('.py'):
                sys.stdout.write('Converting {}\n'.format(script))
                # copy the script to our storage
                with open(script, 'r') as f:
                    script = default_storage.save(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, os.path.split(script)[1]), File(f))
                # NOTE: 'script' now holds the storage-relative path returned
                # by save(), which is joined onto MEDIA_ROOT for registration.
                added, error = add_wooey_script(script=os.path.abspath(os.path.join(settings.MEDIA_ROOT, script)), group=group)
                if added:
                    converted += 1
        sys.stdout.write('Converted {} scripts\n'.format(converted))
| {
"repo_name": "wooey/django-djangui",
"path": "wooey/management/commands/addscript.py",
"copies": "1",
"size": "2007",
"license": "bsd-3-clause",
"hash": 4168658650180711000,
"line_mean": 42.6304347826,
"line_max": 131,
"alpha_frac": 0.6123567514,
"autogenerated": false,
"ratio": 4.038229376257545,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.004140092242174437,
"num_lines": 46
} |
__author__ = "chris"
import os
import unittest
import numpy as np
import pickle
from .utils import timer
from .mixins import FileMixins, GaussianMixin
from pyquant import peaks
from pyquant import PEAK_FINDING_DERIVATIVE, PEAK_FINDING_REL_MAX, PEAK_FIT_MODE_FAST
def get_gauss_value(x, amp, mu, std):
    """Evaluate an unnormalised Gaussian amp * exp(-(x-mu)^2 / (2*std^2)) at x."""
    deviation_sq = (x - mu) ** 2
    return amp * np.exp(-deviation_sq / (2 * std ** 2))
class PeakFindingTests(FileMixins, unittest.TestCase):
    """Regression tests for peaks.findAllPeaks against pickled experimental traces."""

    def test_returns_with_bad_data(self):
        # An all-zero intensity trace must yield no peaks and an infinite residual.
        params, residual = peaks.findAllPeaks(
            np.array([1, 2, 3, 4, 5, 6]), np.array([0, 0, 0, 0, 0, 0])
        )
        self.assertEqual(len(params), 0)
        self.assertEqual(residual, np.inf)

    def test_max_peaks(self):
        # Regression where relative-max is reporting 2 peaks when max_peaks is set to 1. This occurred
        # because we enforced max_peaks for each peak width when using the relative-max setting. Thus,
        # the max peak for each peak width was combined to the final peak report. The update was to
        # pick the peak_width with the lowest BIC.
        with open(os.path.join(self.data_dir, "peak_data.pickle"), "rb") as peak_file:
            data = pickle.load(peak_file, encoding="latin1")
        x, y = data["max_peaks_relative-max"]
        params, residual = peaks.findAllPeaks(
            x,
            y,
            max_peaks=1,
            peak_find_method=PEAK_FINDING_REL_MAX,
            fit_mode=PEAK_FIT_MODE_FAST,
        )
        # A single fitted Gaussian is described by 3 parameters.
        self.assertEqual(len(params), 3)

    def test_max_peaks_with_rt_peak_regression(self):
        with open(os.path.join(self.data_dir, "peak_data.pickle"), "rb") as peak_file:
            data = pickle.load(peak_file, encoding="latin1")
        x, y = data["max_peaks_rt-peak-regression"]
        params, residual = peaks.findAllPeaks(
            x, y, max_peaks=1, rt_peak=360, fit_mode=PEAK_FIT_MODE_FAST
        )
        # The fitted mean (params[1]) should land near the requested rt_peak.
        np.testing.assert_allclose(params[1], desired=365.78, atol=0.1)

    def test_fit_baseline_derivative(self):
        with open(os.path.join(self.data_dir, "peak_data.pickle"), "rb") as peak_file:
            data = pickle.load(peak_file, encoding="latin1")
        x, y = data["max_peaks_rt-peak-regression"]
        params, residual = peaks.findAllPeaks(
            x,
            y,
            max_peaks=1,
            fit_baseline=True,
            rt_peak=328,
            peak_find_method="derivative",
            fit_mode=PEAK_FIT_MODE_FAST,
        )
        # First three params: amplitude, mean, std of the fitted peak.
        np.testing.assert_allclose(
            params[:3], desired=np.array([1320.60, 330.15, 4.22]), atol=10
        )

    def test_segmenty_negatives(self):
        # Regression where a mostly positive dataset with negatives led to -inf values in the data array
        # due to np.max(segment_y) being 0 since all data was negative
        with open(os.path.join(self.data_dir, "peak_data.pickle"), "rb") as peak_file:
            data = pickle.load(peak_file, encoding="latin1")
        x, y = data["invalid_operands"]
        params, res = peaks.findAllPeaks(
            x,
            y,
            max_peaks=-1,
            bigauss_fit=True,
            peak_find_method=PEAK_FINDING_DERIVATIVE,
        )
        # Bigauss fits use 4 parameters per peak; means are at indices 1, 5, 9, ...
        means = params[1::4]
        desired = np.array(
            [
                0.13515435795212014,
                0.33,
                1.474992882679938,
                1.799090776628427,
                2.1804381077669395,
                2.6350000000000002,
                3.227084689771589,
                3.617021549048893,
                4.903333333333333,
                5.296162908137783,
                5.8366172292356175,
            ]
        )
        np.testing.assert_allclose(means, desired=desired, atol=0.1)
class GaussianTests(GaussianMixin, unittest.TestCase):
    """Peak fitting tests on synthetic Gaussian data supplied by GaussianMixin."""

    @timer
    def test_gauss_ndim(self):
        # The two-Gaussian mixture at x == 0 must equal the sum of both components.
        assert np.round(self.two_gauss[np.where(self.x == 0)], 2) == np.round(
            get_gauss_value(0, self.amp, self.mu, self.std)
            + get_gauss_value(0, self.amp, self.mu2, self.std),
            2,
        )

    @timer
    def test_peak_fitting(self):
        # first, a simple case
        params, residual = peaks.findAllPeaks(self.x, self.one_gauss,)
        np.testing.assert_allclose(params, self.one_gauss_params, atol=self.std / 2)
        params, residual = peaks.findAllPeaks(
            self.x, self.one_gauss, peak_find_method=PEAK_FINDING_DERIVATIVE
        )
        np.testing.assert_allclose(params, self.one_gauss_params, atol=self.std / 2)
        # Two overlapping Gaussians, both peak-finding methods.
        params, residual = peaks.findAllPeaks(self.x, self.two_gauss)
        np.testing.assert_allclose(params, self.two_gauss_params, atol=self.std / 2)
        params, residual = peaks.findAllPeaks(
            self.x, self.two_gauss, peak_find_method=PEAK_FINDING_DERIVATIVE
        )
        np.testing.assert_allclose(params, self.two_gauss_params, atol=self.std / 2)
        # Noisy data with bigauss fitting enabled.
        params, residual = peaks.findAllPeaks(
            self.x,
            self.noisy_two_gauss,
            rt_peak=None,
            filter=True,
            max_peaks=30,
            bigauss_fit=True,
            snr=1,
        )
        # Bigauss fits carry 4 params per peak vs 3; compare the means only.
        np.testing.assert_allclose(
            params[1::4], self.two_gauss_params[1::3], atol=self.std / 2
        )

    def test_targeted_search(self):
        # We should not find anything where there are no peaks
        res, residual = peaks.targeted_search(
            self.x,
            self.two_gauss,
            self.x[2],
            attempts=2,
            peak_finding_kwargs={"max_peaks": 2},
        )
        self.assertIsNone(res)
        # Should find the peak when we are in its area
        res, residual = peaks.targeted_search(
            self.x, self.two_gauss, self.two_gauss_params[1]
        )
        self.assertIsNotNone(res)

    def test_experimental(self):
        # Experimental data
        x, y = self.peak_data["offset_fit"]
        params, residual = peaks.findAllPeaks(x, y, bigauss_fit=True, filter=True)
        np.testing.assert_allclose(
            params,
            np.array(
                [
                    13219262.587656807,
                    46.821340819991505,
                    0.06523272363478014,
                    0.18374422913588656,
                    3347200.309180678,
                    47.497,
                    0.6166821402545103,
                    0.3817876338966981,
                    1880722.1582992678,
                    48.14756707645761,
                    0.17537885391522443,
                    0.4846763157315077,
                    1473766.5256005626,
                    49.52607160264086,
                    0.020199999999999108,
                    0.22250905781532157,
                ]
            ),
            # The rtol is 1e-7, so we really don't care about the atol
            atol=20,
        )
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
| {
"repo_name": "Chris7/pyquant",
"path": "tests/test_peaks.py",
"copies": "1",
"size": "6867",
"license": "mit",
"hash": -493664865586904640,
"line_mean": 34.765625,
"line_max": 104,
"alpha_frac": 0.5586136595,
"autogenerated": false,
"ratio": 3.530591259640103,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9585151449868659,
"avg_score": 0.0008106938542888439,
"num_lines": 192
} |
__author__ = 'chris'
import os
import unittest
import numpy as np
import six
import six.moves.cPickle as pickle
from pyquant.tests.utils import timer
from pyquant.tests.mixins import FileMixins, GaussianMixin
from pyquant import peaks
from pyquant import PEAK_FINDING_DERIVATIVE, PEAK_FINDING_REL_MAX, PEAK_FIT_MODE_FAST
def get_gauss_value(x, amp, mu, std):
    """Evaluate an unnormalised Gaussian amp * exp(-(x-mu)^2 / (2*std^2)) at x."""
    exponent = -(x - mu) ** 2 / (2 * std ** 2)
    return amp * np.exp(exponent)
class PeakFindingTests(FileMixins, unittest.TestCase):
    """Regression tests for peaks.findAllPeaks against stored datasets."""
    def _load_peak_data(self):
        # Fixtures were pickled under python 2; latin1 decoding is needed on py3.
        with open(os.path.join(self.data_dir, 'peak_data.pickle'), 'rb') as peak_file:
            if six.PY3:
                return pickle.load(peak_file, encoding='latin1')
            return pickle.load(peak_file)
    def test_returns_with_bad_data(self):
        # A flat all-zero trace must produce no parameters and an infinite residual.
        params, residual = peaks.findAllPeaks(
            np.array([1, 2, 3, 4, 5, 6]), np.array([0, 0, 0, 0, 0, 0])
        )
        self.assertEqual(len(params), 0)
        self.assertEqual(residual, np.inf)
    def test_max_peaks(self):
        # Regression: relative-max reported 2 peaks with max_peaks=1 because the
        # cap was enforced per peak width and the per-width winners were merged.
        # The fix keeps only the peak width with the lowest BIC.
        x, y = self._load_peak_data()['max_peaks_relative-max']
        params, residual = peaks.findAllPeaks(
            x, y, max_peaks=1, peak_find_method=PEAK_FINDING_REL_MAX, fit_mode=PEAK_FIT_MODE_FAST
        )
        self.assertEqual(len(params), 3)
    def test_max_peaks_with_rt_peak_regression(self):
        # With an expected retention time, the single reported mean must be near it.
        x, y = self._load_peak_data()['max_peaks_rt-peak-regression']
        params, residual = peaks.findAllPeaks(
            x, y, max_peaks=1, rt_peak=360, fit_mode=PEAK_FIT_MODE_FAST
        )
        np.testing.assert_allclose(params[1], desired=365.78, atol=0.1)
    def test_baseline_correction_derivative(self):
        # Baseline correction combined with the derivative finder.
        x, y = self._load_peak_data()['max_peaks_rt-peak-regression']
        params, residual = peaks.findAllPeaks(
            x,
            y,
            max_peaks=1,
            baseline_correction=True,
            rt_peak=328,
            peak_find_method='derivative',
            fit_mode=PEAK_FIT_MODE_FAST,
        )
        np.testing.assert_allclose(
            params[:3],
            desired=np.array([1320.60, 330.15, 4.22]),
            atol=10
        )
    def test_segmenty_negatives(self):
        # Regression: a mostly positive dataset with negatives produced -inf values
        # because np.max(segment_y) was 0 when a segment was entirely negative.
        x, y = self._load_peak_data()['invalid_operands']
        params, res = peaks.findAllPeaks(
            x, y, max_peaks=-1, bigauss_fit=True, peak_find_method=PEAK_FINDING_DERIVATIVE
        )
        means = params[1::4]
        desired = np.array([
            0.13515435795212014, 0.33, 1.474992882679938, 1.799090776628427, 2.1804381077669395, 2.6350000000000002,
            3.227084689771589, 3.617021549048893, 4.903333333333333, 5.296162908137783, 5.8366172292356175
        ])
        np.testing.assert_allclose(means, desired=desired, atol=0.1)
class GaussianTests(GaussianMixin, unittest.TestCase):
    """Peak fitting and targeted search on synthetic gaussian mixtures."""
    @timer
    def test_gauss_ndim(self):
        # At x == 0 the summed trace equals the sum of both gaussian components.
        expected = get_gauss_value(0, self.amp, self.mu, self.std) + get_gauss_value(0, self.amp, self.mu2, self.std)
        assert np.round(self.two_gauss[np.where(self.x == 0)], 2) == np.round(expected, 2)
    @timer
    def test_peak_fitting(self):
        tolerance = self.std / 2
        # Both the default and the derivative finder must recover the known
        # parameters of a single and a double gaussian.
        for signal, expected in (
            (self.one_gauss, self.one_gauss_params),
            (self.two_gauss, self.two_gauss_params),
        ):
            params, residual = peaks.findAllPeaks(self.x, signal)
            np.testing.assert_allclose(params, expected, atol=tolerance)
            params, residual = peaks.findAllPeaks(self.x, signal, peak_find_method=PEAK_FINDING_DERIVATIVE)
            np.testing.assert_allclose(params, expected, atol=tolerance)
        # With noise only the fitted means are pinned down; bigauss params come in
        # groups of 4 while the noise-free reference comes in groups of 3.
        params, residual = peaks.findAllPeaks(
            self.x, self.noisy_two_gauss, rt_peak=None, filter=True, max_peaks=30, bigauss_fit=True, snr=1
        )
        np.testing.assert_allclose(params[1::4], self.two_gauss_params[1::3], atol=tolerance)
    def test_targeted_search(self):
        # Far from any peak nothing should be found.
        res, residual = peaks.targeted_search(
            self.x, self.two_gauss, self.x[2], attempts=2, peak_finding_kwargs={'max_peaks': 2}
        )
        self.assertIsNone(res)
        # At the first gaussian's mean a peak must be found.
        res, residual = peaks.targeted_search(self.x, self.two_gauss, self.two_gauss_params[1])
        self.assertIsNotNone(res)
    def test_experimental(self):
        # Stored experimental chromatogram; expected values are a regression snapshot.
        x, y = self.peak_data['offset_fit']
        params, residual = peaks.findAllPeaks(x, y, bigauss_fit=True, filter=True)
        expected = np.array([
            13219262.587656807, 46.821340819991505, 0.06523272363478014, 0.18374422913588656,
            3347200.309180678, 47.497, 0.6166821402545103, 0.3817876338966981,
            1880722.1582992678, 48.14756707645761, 0.17537885391522443, 0.4846763157315077,
            1473766.5256005626, 49.52607160264086, 0.020199999999999108, 0.22250905781532157
        ])
        np.testing.assert_allclose(params, expected, atol=10)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| {
"repo_name": "pandeylab/pyquant",
"path": "pyquant/tests/test_peaks.py",
"copies": "1",
"size": "6061",
"license": "mit",
"hash": -2244795943386304500,
"line_mean": 47.1031746032,
"line_max": 174,
"alpha_frac": 0.6563273387,
"autogenerated": false,
"ratio": 3.1584158415841586,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9291737410057034,
"avg_score": 0.004601154045424938,
"num_lines": 126
} |
__author__ = 'chris'
import pickle
import os
import unittest
import numpy as np
import pandas as pd
import six
from pyquant import utils
from pyquant.tests.mixins import GaussianMixin
class UtilsTests(GaussianMixin, unittest.TestCase):
    """Unit tests for helper routines in pyquant.utils."""
    def setUp(self):
        super(UtilsTests, self).setUp()
        # Fixtures live in a 'data' directory next to this file.
        self.base_dir = os.path.split(os.path.abspath(__file__))[0]
        self.data_dir = os.path.join(self.base_dir, 'data')
    def test_select_window(self):
        # select_window(x, index, size) returns the slice of x centered on
        # index, clamped to the sequence bounds.
        x = list(range(10))
        selection = utils.select_window(x, 0, 3)
        self.assertListEqual(selection, [0, 1, 2, 3])
        selection = utils.select_window(x, 3, 3)
        self.assertListEqual(selection, [0, 1, 2, 3, 4, 5, 6])
        selection = utils.select_window(x, 8, 3)
        self.assertListEqual(selection, [5, 6, 7, 8, 9])
        selection = utils.select_window(x, 8, 20)
        self.assertListEqual(selection, x)
        # Make sure we don't break if a float is passed
        selection = utils.select_window(x, 8, 20.0)
        self.assertListEqual(selection, x)
    def test_divide_peaks(self):
        # divide_peaks returns the indices where a trace can be split between peaks.
        chunks = utils.divide_peaks(self.one_gauss)
        two_gauss_chunks = utils.divide_peaks(self.two_gauss, chunk_factor=1.0)
        self.assertEqual(len(chunks), 0)
        self.assertEqual(len(two_gauss_chunks), 1)
        self.assertEqual(two_gauss_chunks[0], 65)
    def test_calculate_theoretical_distribution(self):
        # Isotopic distributions can be derived from a peptide sequence or an
        # explicit elemental composition.
        peptide = 'PEPTIDE'
        pep_comp = utils.calculate_theoretical_distribution(peptide=peptide)
        ele_comp = utils.calculate_theoretical_distribution(elemental_composition={'C': 7})
        np.testing.assert_almost_equal(pep_comp.values.tolist(), [0.6411550319843632, 0.2662471681269686, 0.07401847648709056, 0.015434213671511215, 0.002681646815294711])
        np.testing.assert_almost_equal(ele_comp.values.tolist(), [0.9254949240653104, 0.07205572209608584, 0.002404285974894674])
    def test_ml(self):
        # Runs the ML confidence pipeline and checks confidence columns are populated.
        data = os.path.join(self.data_dir, 'ml_data.tsv')
        dat = pd.read_table(data)
        labels = ['Heavy', 'Medium', 'Light']
        utils.perform_ml(dat, {i: [] for i in labels})
        for label1 in labels:
            for label2 in labels:
                if label1 == label2:
                    continue
                col = '{}/{} Confidence'.format(label1, label2)
                # NOTE(review): 'col' is computed but unused — the assertion always
                # checks the hard-coded 'Heavy/Light Confidence' column. It looks
                # like dat[col] was intended; confirm before changing.
                self.assertNotEqual(sum(pd.isnull(dat['Heavy/Light Confidence']) == False), 0)
    def test_merge_peaks(self):
        # merge_peaks collapses peak-width entries describing the same peaks.
        peaks = {1: {'minima': [0,1,2,4,5], 'peaks': [3]}, 2: {'minima': [0,1,2,4,5], 'peaks': [3]}, 7: {'minima': [0,1,2,4,5], 'peaks': [3]}}
        merged = utils.merge_peaks(peaks)
        self.assertDictEqual(merged, {7: {'minima': [0, 1, 2, 4, 5], 'peaks': [3]}})
        peaks = {1: {'minima': [0,1,2,4,5], 'peaks': [3]}}
        merged = utils.merge_peaks(peaks)
        self.assertDictEqual(merged, {1: {'minima': [0,1,2,4,5], 'peaks': [3]}})
        peaks = {1: {'minima': [0,5], 'peaks': [3,7,8]}, 2: {'minima': [0,5], 'peaks': [3,7]}, 7: {'minima': [0,5], 'peaks': [3,7]}}
        merged = utils.merge_peaks(peaks)
        self.assertDictEqual(merged, {1: {'minima': [0,5], 'peaks': [3,7,8]}, 7: {'minima': [0,5], 'peaks': [3,7]}})
    def test_get_cross_points(self):
        # get_cross_points returns indices where the signal changes sign;
        # pad=True also includes the sequence endpoints.
        y = [1, 1, 1, 1, 1, 1, 1]
        self.assertListEqual(utils.get_cross_points(y), [])
        y = [1, 1, 1, 1, 1, -1, 1]
        self.assertListEqual(utils.get_cross_points(y, pad=False), [4, 5])
        self.assertListEqual(utils.get_cross_points(y, pad=True), [0, 4, 5, 6])
        y = [1, -1, 1]
        self.assertListEqual(utils.get_cross_points(y, pad=False), [0, 1])
        self.assertListEqual(utils.get_cross_points(y, pad=True), [0, 1, 2])
        y = [1, 1, 1, 1, -1, -1, -1, -1]
        self.assertListEqual(utils.get_cross_points(y, pad=True), [0, 3, 7])
    def test_find_peaks_derivative(self):
        # Derivative-based peak finding on a stored wide-range dataset;
        # expected indices are a regression snapshot.
        with open(os.path.join(self.data_dir, 'peak_data.pickle'), 'rb') as peak_file:
            # Fixture was pickled under python 2; latin1 decoding is needed on py3.
            data = pickle.load(peak_file, encoding='latin1') if six.PY3 else pickle.load(peak_file)
        x, y = data['large_range']
        peaks = utils.find_peaks_derivative(x, y, smooth=False)
        peaks = next(iter(peaks.values()))
        np.testing.assert_array_equal(
            peaks['peaks'],
            np.array([357, 378, 432, 1668, 1755, 1811, 1835, 1912, 2009, 2399, 2577, 2952, 3171])
        )
        np.testing.assert_array_equal(
            peaks['minima'],
            np.array([ 337, 366, 366, 423, 423, 667, 1654, 1678, 1732, 1774, 1798,
                1825, 1825, 1844, 1901, 1930, 1992, 2024, 2369, 2409, 2543, 2608,
                2905, 2996, 3155, 3187]),
        )
    def test_interpolate_data(self):
        # Gaps of up to gap_limit consecutive zeros are filled in; longer gaps
        # (and leading zeros) are left untouched.
        y = [0, 0, 0, 6311371.403331924, 24368020.237973947, 33309587.186450623, 0, 0, 22678022.890094325,
             12544950.520046625, 9621327.844190728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        x = [23.6401, 23.6813, 23.7225, 23.7650, 23.8063, 23.8484, 23.8905, 23.9343, 23.9759, 24.0183, 24.0602, 24.1009,
             24.1440, 24.1877, 24.2282, 24.2738, 24.3195, 24.3662, 24.4168, 24.4607, 24.5016, 24.5417, 24.5812, 24.6235,
             24.7088, 24.7942]
        interp_y = utils.interpolate_data(x, y, gap_limit=2)
        self.assertNotEqual(interp_y[6], 0)
        self.assertNotEqual(interp_y[7], 0)
        six.assertCountEqual(self, interp_y[:3], [0,0,0])
        interp_y = utils.interpolate_data(x, y, gap_limit=1)
        six.assertCountEqual(self, interp_y[6:8], [0,0])
    def test_merge_close_peaks(self):
        # Peaks closer than 'distance' are merged, keeping the tallest one.
        ty = np.array([0, 1, 2, 1, 0, 1, 2, 3, 2, 1, 0, 1, 3, 3])
        merged = utils.merge_close_peaks(np.array([7, 12]), ty, distance=6)
        np.testing.assert_array_equal(merged, np.array([7, 12]))
        merged = utils.merge_close_peaks(np.array([2, 7, 12]), ty, distance=5)
        np.testing.assert_array_equal(merged, np.array([2, 7, 12]))
        ty = np.array([0, 1, 2, 1, 0, 1, 2, 4, 2, 1, 0, 1, 3, 3])
        merged = utils.merge_close_peaks(np.array([2, 7, 12]), ty, distance=6)
        np.testing.assert_array_equal(merged, np.array([7]))
        merged = utils.merge_close_peaks(np.array([]), ty, distance=6)
        np.testing.assert_array_equal(merged, np.array([]))
    def test_get_formatted_mass(self):
        # String and float inputs with differing trailing zeros normalize identically.
        self.assertEqual(utils.get_formatted_mass('0.123'), utils.get_formatted_mass(0.123))
        self.assertEqual(utils.get_formatted_mass('0.12300'), utils.get_formatted_mass(0.123))
        self.assertEqual(utils.get_formatted_mass('123.12300'), utils.get_formatted_mass(123.123))
        self.assertEqual(utils.get_formatted_mass('123.12300'), utils.get_formatted_mass(123.1230000))
    def test_get_scan_resolution(self):
        # Resolution estimation from a stored low-resolution scan...
        with open(os.path.join(self.data_dir, 'peak_data.pickle'), 'rb') as peak_file:
            # Fixture was pickled under python 2; latin1 decoding is needed on py3.
            data = pickle.load(peak_file, encoding='latin1') if six.PY3 else pickle.load(peak_file)
        x, y = data['low_res_scan']
        scan = pd.Series(y, index=x)
        resolution = utils.get_scan_resolution(scan)
        self.assertAlmostEqual(int(resolution), 30459)
        # ...and from an inline high-resolution m/z-intensity pair.
        x = np.array([
            805.47264226, 805.47265226, 805.47495304, 805.47739824,
            805.47984345, 805.49206968, 805.49451496, 805.49696025,
            805.49940556, 805.50185087, 805.50429619, 805.50674153,
            805.50918687, 805.51163223, 805.51407748, 805.51652286,
            805.51896825, 805.52141365, 805.7635636 , 805.76601011,
            805.76845664, 805.77090318, 805.77334973, 805.77579629,
            805.77824286, 805.78068944, 805.78313604, 805.78558264,
            805.79153526, 805.79398188, 805.79642852, 805.79887517,
            805.80132183, 805.8037685 , 805.80621519, 805.80622519,
            805.80623519, 805.80661184, 805.80905857, 806.09295462,
            806.09540266, 806.0978507 , 806.10029875, 806.10274682,
            806.1051949 , 806.10764298, 806.11009108, 806.11253919,
            806.11498731, 806.11743545, 806.12539827, 806.12784643,
            806.13029459, 806.13274277, 806.13519096, 806.13763916,
            806.14008737, 806.14009737, 806.14206697, 806.14451522,
        ])
        y = np.array([
            0., 0., 0., 0.,
            0., 0., 0., 0.,
            0., 0., 0., 0.,
            0., 0., 0., 0.,
            0., 0., 0., 0.,
            0., 0., 0., 720001.9375 ,
            3056311.5, 5417428.5, 5494928., 2185260.5,
            4099960.25, 6359202.5, 7897908.5, 5956936.5,
            3124577.75, 533339., 0., 0.,
            0., 0., 0., 0.,
            0., 0., 0., 0.,
            554049.75, 2336927., 4342714.5, 4671674.,
            3267909.5, 2324766.75, 3356381.25, 5258928.5,
            6305622.5, 4235590., 2183502.75, 306638.96875,
            0., 0., 0., 0.
        ])
        scan = pd.Series(y, index=x)
        resolution = utils.get_scan_resolution(scan)
        self.assertAlmostEqual(resolution, 90781.241173982111)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| {
"repo_name": "pandeylab/pyquant",
"path": "pyquant/tests/test_utils.py",
"copies": "1",
"size": "9211",
"license": "mit",
"hash": 8637839358337481000,
"line_mean": 47.7354497354,
"line_max": 171,
"alpha_frac": 0.5697535555,
"autogenerated": false,
"ratio": 2.791212121212121,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3860965676712121,
"avg_score": null,
"num_lines": null
} |
# PyQuant command-line interface: shared constants and the argparse parser
# used by the console entry points.
__author__ = "chris"
import pkg_resources  # part of setuptools; used to read the installed version
import argparse
from pythomics.proteomics import config
# Installed distribution's version string, shown in the program name below.
version = pkg_resources.require("pyquant")[0].version
description = """
This will quantify labeled peaks (such as SILAC) in ms1 spectra. It relies solely on the distance between peaks,
which can correct for errors due to amino acid conversions.
"""
# How peaks are reconciled across multiple XICs.
PEAK_RESOLUTION_RT_MODE = "rt"
PEAK_RESOLUTION_COMMON_MODE = "common-peak"
# Available peak-finding algorithms.
PEAK_FINDING_REL_MAX = "relative-max"
PEAK_FINDING_DERIVATIVE = "derivative"
# Preset speed/quality trade-offs for peak fitting.
PEAK_FIT_MODE_FAST = "fast"
PEAK_FIT_MODE_AVERAGE = "average"
PEAK_FIT_MODE_SLOW = "slow"
pyquant_parser = argparse.ArgumentParser(
    prog="PyQuant v{}".format(version),
    description=description,
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
pyquant_parser.add_argument("-p", help="Threads to run", type=int, default=1)
pyquant_parser.add_argument("--theo-xic", help=argparse.SUPPRESS, action="store_true")
# --- Raw data inputs and mass-accuracy tolerances ---
raw_group = pyquant_parser.add_argument_group("Raw Data Parameters")
raw_group.add_argument(
    "--scan-file",
    help="The scan file(s) for the raw data. If not provided, assumed to be in the directory of the processed/tabbed/peaklist file.",
    type=argparse.FileType("r"),
    nargs="*",
)
raw_group.add_argument(
    "--scan-file-dir", help="The directory containing raw data.", type=str
)
raw_group.add_argument(
    "--precision",
    help="The precision for storing m/z values. Defaults to 6 decimal places.",
    type=int,
    default=6,
)
raw_group.add_argument(
    "--precursor-ppm",
    help="The mass accuracy for the first monoisotopic peak in ppm.",
    type=float,
    default=5,
)
raw_group.add_argument(
    "--isotope-ppm",
    help="The mass accuracy for the isotopic cluster.",
    type=float,
    default=2.5,
)
raw_group.add_argument(
    "--spread",
    help="Assume there is spread of the isotopic label.",
    action="store_true",
)
# --- Search engine / identification inputs ---
search_group = pyquant_parser.add_argument_group("Search Information")
search_group.add_argument(
    "--search-file",
    help="A search output or Proteome Discoverer msf file",
    type=argparse.FileType("rb"),
    required=False,
)
search_group.add_argument(
    "--skip",
    help="If true, skip scans with missing files in the mapping.",
    action="store_true",
)
search_group.add_argument(
    "--peptide", help="The peptide(s) to limit quantification to.", type=str, nargs="*"
)
search_group.add_argument(
    "--peptide-file",
    help="A file of peptide(s) to limit quantification to.",
    type=argparse.FileType("r"),
)
search_group.add_argument(
    "--scan", help="The scan(s) to limit quantification to.", type=str, nargs="*"
)
# --- Missing-value (MVA) analysis ---
replicate_group = pyquant_parser.add_argument_group("Missing Value Analysis")
replicate_group.add_argument(
    "--mva", help="Analyze files in 'missing value' mode.", action="store_true"
)
replicate_group.add_argument(
    "--rt-window",
    help="The maximal deviation of a scan's retention time to be considered for analysis.",
    default=0.25,
    type=float,
)
# --- Isotopic labeling configuration (scheme file and method are exclusive) ---
label_group = pyquant_parser.add_argument_group("Labeling Information")
label_subgroup = label_group.add_mutually_exclusive_group()
label_subgroup.add_argument(
    "--label-scheme",
    help="The file corresponding to the labeling scheme utilized.",
    type=argparse.FileType("r"),
)
label_subgroup.add_argument(
    "--label-method",
    help="Predefined labeling schemes to use.",
    type=str,
    choices=sorted(config.LABEL_SCHEMES.keys()),
)
label_group.add_argument(
    "--reference-label",
    help="The label to use as a reference (by default all comparisons are taken).",
    type=str,
)
# --- Tab-delimited input: column-name mapping ---
tsv_group = pyquant_parser.add_argument_group("Tabbed File Input")
tsv_group.add_argument(
    "--tsv",
    help="A delimited file containing scan information.",
    type=argparse.FileType("r"),
)
tsv_group.add_argument(
    "--label",
    help="The column indicating the label state of the peptide. If not found, entry assumed to be light variant.",
    default="Labeling State",
)
tsv_group.add_argument(
    "--peptide-col", help="The column indicating the peptide.", default="Peptide"
)
tsv_group.add_argument(
    "--rt", help="The column indicating the retention time.", default="Retention time"
)
tsv_group.add_argument(
    "--mz",
    help="The column indicating the MZ value of the precursor ion. This is not the MH+.",
    default="Light Precursor",
)
tsv_group.add_argument(
    "--scan-col",
    help="The column indicating the scan corresponding to the ion.",
    default="MS2 Spectrum ID",
)
tsv_group.add_argument(
    "--charge",
    help="The column indicating the charge state of the ion.",
    default="Charge",
)
tsv_group.add_argument(
    "--source",
    help="The column indicating the raw file the scan is contained in.",
    default="Raw file",
)
# --- Targeted ion search: find and quantify specific m/z values across scans ---
# Fix: group title previously read "Targetted" (user-facing help-text typo).
ion_search_group = pyquant_parser.add_argument_group("Targeted Ion Search Parameters")
ion_search_group.add_argument(
    "--msn-id",
    help="The ms level to search for the ion in. Default: 2 (ms2)",
    type=int,
    default=2,
)
ion_search_group.add_argument(
    "--msn-quant-from",
    help="The ms level to quantify values from. i.e. if we are identifying an ion in ms2, we can quantify it in ms1 (or ms2). Default: msn value-1",
    type=int,
    default=None,
)
ion_search_group.add_argument(
    "--msn-ion",
    help="M/Z values to search for in the scans. To search for multiple m/z values for a given ion, separate m/z values with a comma.",
    nargs="+",
    type=str,
)
ion_search_group.add_argument(
    "--msn-ion-rt", help="RT values each ion is expected at.", nargs="+", type=float
)
ion_search_group.add_argument(
    "--msn-peaklist",
    help="A file containing peaks to search for in the scans.",
    type=argparse.FileType("rb"),
)
ion_search_group.add_argument(
    "--msn-ppm",
    help="The error tolerance for identifying the ion(s).",
    type=float,
    default=200,
)
ion_search_group.add_argument(
    "--msn-rt-window",
    help="The range of retention times for identifying the ion(s). (ex: 7.54-9.43)",
    type=str,
    nargs="+",
)
ion_search_group.add_argument(
    "--msn-all-scans",
    help="Search for the ion across all scans (ie if you have 3 ions, you will have 3 results with one long XIC)",
    action="store_true",
)
ion_search_group.add_argument(
    "--require-all-ions",
    help="If multiple ions are set (in the style of 93.15,105.15), all ions must be found in a scan.",
    action="store_true",
)
# --- Core quantification behavior ---
quant_parameters = pyquant_parser.add_argument_group("Quantification Parameters")
quant_parameters.add_argument(
    "--quant-method",
    help="The process to use for quantification. Default: Integrate for ms1, sum for ms2+.",
    choices=["integrate", "sum"],
    default=None,
)
quant_parameters.add_argument(
    "--reporter-ion",
    help="Indicates that reporter ions are being used. As such, we only analyze a single scan.",
    action="store_true",
)
quant_parameters.add_argument(
    "--isotopologue-limit",
    help="How many isotopologues to quantify",
    type=int,
    default=-1,
)
quant_parameters.add_argument(
    "--overlapping-labels",
    help="This declares the mz values of labels will overlap. It is useful for data such as neucode, but not needed for only SILAC labeling.",
    action="store_true",
)
quant_parameters.add_argument(
    "--labels-needed",
    # Fix: the help string previously had an unbalanced parenthesis.
    help="How many labels need to be detected to quantify a scan (ie if you have a 2 state experiment and set this to 2, it will only quantify scans where both occur).",
    default=1,
    type=int,
)
quant_parameters.add_argument(
    "--merge-labels", help="Merge labels together to a single XIC.", action="store_true"
)
quant_parameters.add_argument(
    "--min-scans",
    help="How many quantification scans are needed to quantify a scan.",
    default=1,
    type=int,
)
quant_parameters.add_argument(
    "--min-resolution",
    help="The minimal resolving power of a scan to consider for quantification. Useful for skipping low-res scans",
    default=0,
    type=float,
)
quant_parameters.add_argument(
    "--no-mass-accuracy-correction",
    help="Disables the mass accuracy correction.",
    action="store_true",
)
quant_parameters.add_argument(
    "--no-contaminant-detection",
    help="Disables routine to check if an ion is a contaminant of a nearby peptide (checks if its a likely isotopologue).",
    action="store_true",
)
# --- Peak finding and curve-fitting behavior ---
peak_parameters = pyquant_parser.add_argument_group("Peak Fitting Parameters")
peak_parameters.add_argument(
    "--peak-find-method",
    help="The method to use to identify peaks within data. For LC-MS, relative-max is usually best. For smooth data, derivative is better.",
    type=str,
    choices=(PEAK_FINDING_REL_MAX, PEAK_FINDING_DERIVATIVE),
    default=PEAK_FINDING_REL_MAX,
)
peak_parameters.add_argument(
    "--peak-find-mode",
    help="This picks some predefined parameters for various use cases. Fast is good for robust data with few peaks, slow is good for complex data with overlapping peaks of very different size.",
    type=str,
    choices=(PEAK_FIT_MODE_SLOW, PEAK_FIT_MODE_AVERAGE, PEAK_FIT_MODE_FAST),
    default=PEAK_FIT_MODE_AVERAGE,
)
peak_parameters.add_argument(
    "--gap-interpolation",
    help="This interpolates missing data in scans. The parameter should be a number that is the maximal gap size to fill (ie 2 means a gap of 2 scans). Can be useful for low intensity LC-MS data.",
    type=int,
    default=0,
)
peak_parameters.add_argument(
    "--fit-baseline",
    help="Fit a separate line for the baseline of each peak.",
    action="store_true",
)
peak_parameters.add_argument(
    "--peak-cutoff",
    help="The threshold from the initial retention time a peak can fall by before being discarded",
    type=float,
    default=0.05,
)
peak_parameters.add_argument(
    "--max-peaks",
    help="The maximal number of peaks to detect per scan. A lower value can help with very noisy data.",
    type=int,
    default=-1,
)
peak_parameters.add_argument(
    "--peaks-n",
    help="The number of peaks to report per scan. Useful for ions with multiple elution times.",
    type=int,
    default=1,
)
peak_parameters.add_argument(
    "--no-rt-guide",
    help="Do not use the retention time to bias for peaks containing the MS trigger time.",
    action="store_true",
)
peak_parameters.add_argument(
    "--snr-filter", help="Filter peaks below a given SNR.", type=float, default=0
)
peak_parameters.add_argument(
    "--zscore-filter",
    help="Peaks below a given z-score are excluded.",
    type=float,
    default=0,
)
peak_parameters.add_argument(
    "--filter-width",
    help="The window size for snr/zscore filtering. Default: entire scan",
    type=float,
    default=0,
)
peak_parameters.add_argument(
    "--r2-cutoff",
    help="The minimal R^2 for a peak to be kept. Should be a value between 0 and 1",
    type=float,
    default=None,
)
peak_parameters.add_argument(
    "--intensity-filter",
    help="Filter peaks whose peak are below a given intensity.",
    type=float,
    default=0,
)
peak_parameters.add_argument(
    "--percentile-filter",
    help="Filter peaks whose peak are below a given percentile of the data.",
    type=float,
    default=0,
)
peak_parameters.add_argument(
    "--min-peak-separation",
    help="Peaks separated by less than this distance will be combined. For very crisp data, set this to a lower number. (minimal value is 1)",
    type=int,
    default=5,
)
peak_parameters.add_argument(
    "--disable-peak-filtering",
    help="This will disable smoothing of data prior to peak finding. If you have very good LC, this may be used to identify small peaks.",
    action="store_true",
)
peak_parameters.add_argument(
    "--merge-isotopes",
    help="Merge Isotopologues together prior to fitting.",
    action="store_true",
)
peak_parameters.add_argument(
    "--peak-resolution-mode",
    help="The method to use to resolve peaks across multiple XICs",
    choices=(PEAK_RESOLUTION_RT_MODE, PEAK_RESOLUTION_COMMON_MODE),
    type=str,
    default="common-peak",
)
# Deprecated parameters: --remove-baseline is a hidden alias for --fit-baseline
# (same dest, help suppressed).
peak_parameters.add_argument(
    "--remove-baseline",
    help=argparse.SUPPRESS,
    action="store_true",
    dest="fit_baseline",
)
# --- XIC (extracted ion chromatogram) construction options ---
xic_parameters = pyquant_parser.add_argument_group("XIC Options")
xic_parameters.add_argument(
    "--xic-snr",
    help="When the SNR of the XIC falls below this, stop searching for more data. Useful for escaping from noisy shoulders and contaminants.",
    type=float,
    default=1.0,
)
xic_parameters.add_argument(
    "--xic-missing-ion-count",
    # Fix: help string previously misspelled "consecutive" as "consequtive".
    help="This specifies how many consecutive scans an ion can be missing for until it is no longer considered.",
    type=int,
    default=1,
)
xic_parameters.add_argument(
    "--xic-window-size",
    help="When the number of scans in a given direction from the initial datapoint of an XIC passes this, stop. Default is -1 (disabled). Useful for removing contaminants",
    type=int,
    default=-1,
)
xic_parameters.add_argument(
    "--xic-smooth",
    help="Prior to fitting, smooth data with a Gaussian filter.",
    action="store_true",
)
xic_parameters.add_argument(
    "--export-msn",
    # NOTE(review): 'store_false' makes this default to True and the flag turns
    # export OFF, which contradicts the help text — confirm intent before changing.
    help="This will export spectra of a given MSN that were used to provide the quantification.",
    action="store_false",
)
# --- SRM/MRM-specific inputs ---
mrm_parameters = pyquant_parser.add_argument_group("SRM/MRM Parameters")
# --mrm-map (help suppressed): a file indicating light and heavy peptide pairs,
# and optionally the known elution time.
mrm_parameters.add_argument(
    "--mrm-map", help=argparse.SUPPRESS, type=argparse.FileType("r")
)
# --- Output destinations and reporting ---
output_group = pyquant_parser.add_argument_group("Output Options")
output_group.add_argument(
    "--debug", help="This will output debug information.", action="store_true"
)
output_group.add_argument(
    "--html", help="Output a HTML table summary.", action="store_true"
)
output_group.add_argument(
    "--resume",
    help="Will resume from the last run. Only works if not directing output to stdout.",
    action="store_true",
)
output_group.add_argument(
    "--sample",
    help="How much of the data to sample. Enter as a decimal (ie 1.0 for everything, 0.1 for 10%%)",
    type=float,
    default=1.0,
)
output_group.add_argument(
    "--disable-stats",
    help="Disable confidence statistics on data.",
    action="store_true",
)
output_group.add_argument(
    "--no-ratios", help="Disable reporting of ratios in output.", action="store_true"
)
output_group.add_argument(
    "-o", "--out", nargs="?", help="The prefix for the file output", type=str
)
# Allowed values for --export-mode below.
PER_PEAK = "per-peak"
PER_FILE = "per-file"
PER_ID = "per-id"
spectra_output = pyquant_parser.add_argument_group("Spectra Output Options")
spectra_output.add_argument(
    "--export-mzml",
    help="Create an mzml file of spectra contained within each peak.",
    action="store_true",
)
spectra_output.add_argument(
    "--export-mode",
    help="How to export the scans. per-peak: A mzML per peak identified. per-id: A mzML per ion identified (each row of the output gets an mzML). per-file: All scans matched per raw file.",
    type=str,
    default="per-peak",
    choices={PER_PEAK, PER_ID, PER_FILE},
)
# --- Presets that bundle parameters for common experiment types ---
convenience_group = pyquant_parser.add_argument_group("Convenience Parameters")
convenience_group.add_argument(
    "--neucode",
    help="This will select parameters specific for neucode. Note: You still must define a labeling scheme.",
    action="store_true",
)
convenience_group.add_argument(
    "--isobaric-tags",
    help="This will select parameters specific for isobaric tag based labeling (TMT/iTRAQ).",
    action="store_true",
)
convenience_group.add_argument(
    "--ms3",
    help="This will select parameters specific for ms3 based quantification.",
    action="store_true",
)
convenience_group.add_argument(
    "--maxquant",
    help="This will select parameters specific for a MaxQuant evidence file.",
    action="store_true",
)
convenience_group.add_argument(
    "--gcms",
    help="This will select parameters specific for ion identification and quantification in GCMS experiments.",
    action="store_true",
)
# --mrm (help suppressed): select parameters specific for Selective/Multiple
# Reaction Monitoring (SRM/MRM).
convenience_group.add_argument("--mrm", help=argparse.SUPPRESS, action="store_true")
| {
"repo_name": "Chris7/pyquant",
"path": "pyquant/__init__.py",
"copies": "1",
"size": "16230",
"license": "mit",
"hash": 4728967796479011000,
"line_mean": 32.1901840491,
"line_max": 197,
"alpha_frac": 0.6970425139,
"autogenerated": false,
"ratio": 3.4254959898691433,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9617338016572554,
"avg_score": 0.0010400974393179458,
"num_lines": 489
} |
__author__ = 'chris'
# PyQuant command line interface: this module only *declares* the argparse
# parser and its option groups; the caller invokes parse_args() on
# `pyquant_parser` and acts on the result.
import pkg_resources # part of setuptools
import argparse
from pythomics.proteomics import config
# Version of the installed distribution; displayed in the parser prog string.
version = pkg_resources.require('pyquant-ms')[0].version
description = """
This will quantify labeled peaks (such as SILAC) in ms1 spectra. It relies solely on the distance between peaks,
which can correct for errors due to amino acid conversions.
"""
# Symbolic names for string-valued CLI choices; keep in sync with the
# `choices=` arguments used further down.
PEAK_RESOLUTION_RT_MODE = 'rt'
PEAK_RESOLUTION_COMMON_MODE = 'common-peak'
PEAK_FINDING_REL_MAX = 'relative-max'
PEAK_FINDING_DERIVATIVE = 'derivative'
PEAK_FIT_MODE_FAST = 'fast'
PEAK_FIT_MODE_AVERAGE = 'average'
PEAK_FIT_MODE_SLOW = 'slow'
# Root parser; ArgumentDefaultsHelpFormatter appends each default to --help.
pyquant_parser = argparse.ArgumentParser(prog='PyQuant v{}'.format(version), description=description, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
pyquant_parser.add_argument('-p', help="Threads to run", type=int, default=1)
pyquant_parser.add_argument('--theo-xic', help=argparse.SUPPRESS, action='store_true') # hidden option
# --- Raw data access and mass-accuracy tolerances --------------------------
raw_group = pyquant_parser.add_argument_group("Raw Data Parameters")
raw_group.add_argument('--scan-file', help="The scan file(s) for the raw data. If not provided, assumed to be in the directory of the processed/tabbed/peaklist file.", type=argparse.FileType('r'), nargs='*')
raw_group.add_argument('--scan-file-dir', help="The directory containing raw data.", type=str)
raw_group.add_argument('--precision', help="The precision for storing m/z values. Defaults to 6 decimal places.", type=int, default=6)
raw_group.add_argument('--precursor-ppm', help="The mass accuracy for the first monoisotopic peak in ppm.", type=float, default=5)
raw_group.add_argument('--isotope-ppm', help="The mass accuracy for the isotopic cluster.", type=float, default=2.5)
raw_group.add_argument('--spread', help="Assume there is spread of the isotopic label.", action='store_true')
# --- Identification input (search engine results) --------------------------
search_group = pyquant_parser.add_argument_group("Search Information")
search_group.add_argument('--search-file', help='A search output or Proteome Discoverer msf file', type=argparse.FileType('rb'), required=False)
search_group.add_argument('--skip', help="If true, skip scans with missing files in the mapping.", action='store_true')
search_group.add_argument('--peptide', help="The peptide(s) to limit quantification to.", type=str, nargs='*')
search_group.add_argument('--peptide-file', help="A file of peptide(s) to limit quantification to.", type=argparse.FileType('r'))
search_group.add_argument('--scan', help="The scan(s) to limit quantification to.", type=str, nargs='*')
# --- Missing value (match-between-runs style) analysis ----------------------
replicate_group = pyquant_parser.add_argument_group("Missing Value Analysis")
replicate_group.add_argument('--mva', help="Analyze files in 'missing value' mode.", action='store_true')
replicate_group.add_argument('--rt-window', help="The maximal deviation of a scan's retention time to be considered for analysis.", default=0.25, type=float)
# --- Labeling scheme: file and predefined method are mutually exclusive -----
label_group = pyquant_parser.add_argument_group("Labeling Information")
label_subgroup = label_group.add_mutually_exclusive_group()
label_subgroup.add_argument('--label-scheme', help='The file corresponding to the labeling scheme utilized.', type=argparse.FileType('r'))
label_subgroup.add_argument('--label-method', help='Predefined labeling schemes to use.', type=str, choices=sorted(config.LABEL_SCHEMES.keys()))
label_group.add_argument('--reference-label', help='The label to use as a reference (by default all comparisons are taken).', type=str)
# --- Delimited-file input: column-name mapping ------------------------------
tsv_group = pyquant_parser.add_argument_group('Tabbed File Input')
tsv_group.add_argument('--tsv', help='A delimited file containing scan information.', type=argparse.FileType('r'))
tsv_group.add_argument('--label', help='The column indicating the label state of the peptide. If not found, entry assumed to be light variant.', default='Labeling State')
tsv_group.add_argument('--peptide-col', help='The column indicating the peptide.', default='Peptide')
tsv_group.add_argument('--rt', help='The column indicating the retention time.', default='Retention time')
tsv_group.add_argument('--mz', help='The column indicating the MZ value of the precursor ion. This is not the MH+.', default='Light Precursor')
tsv_group.add_argument('--scan-col', help='The column indicating the scan corresponding to the ion.', default='MS2 Spectrum ID')
tsv_group.add_argument('--charge', help='The column indicating the charge state of the ion.', default='Charge')
tsv_group.add_argument('--source', help='The column indicating the raw file the scan is contained in.', default='Raw file')
# --- Targeted ion search (quantify specific m/z values) ---------------------
ion_search_group = pyquant_parser.add_argument_group('Targetted Ion Search Parameters')
ion_search_group.add_argument('--msn-id', help='The ms level to search for the ion in. Default: 2 (ms2)', type=int, default=2)
ion_search_group.add_argument('--msn-quant-from', help='The ms level to quantify values from. i.e. if we are identifying an ion in ms2, we can quantify it in ms1 (or ms2). Default: msn value-1', type=int, default=None)
ion_search_group.add_argument('--msn-ion', help='M/Z values to search for in the scans. To search for multiple m/z values for a given ion, separate m/z values with a comma.', nargs='+', type=str)
ion_search_group.add_argument('--msn-ion-rt', help='RT values each ion is expected at.', nargs='+', type=float)
ion_search_group.add_argument('--msn-peaklist', help='A file containing peaks to search for in the scans.', type=argparse.FileType('rb'))
ion_search_group.add_argument('--msn-ppm', help='The error tolerance for identifying the ion(s).', type=float, default=200)
ion_search_group.add_argument('--msn-rt-window', help='The range of retention times for identifying the ion(s). (ex: 7.54-9.43)', type=str, nargs='+')
ion_search_group.add_argument('--msn-all-scans', help='Search for the ion across all scans (ie if you have 3 ions, you will have 3 results with one long XIC)', action='store_true')
ion_search_group.add_argument('--require-all-ions', help='If multiple ions are set (in the style of 93.15,105.15), all ions must be found in a scan.', action='store_true')
# --- Core quantification behavior -------------------------------------------
quant_parameters = pyquant_parser.add_argument_group('Quantification Parameters')
quant_parameters.add_argument('--quant-method', help='The process to use for quantification. Default: Integrate for ms1, sum for ms2+.', choices=['integrate', 'sum'], default=None)
quant_parameters.add_argument('--reporter-ion', help='Indicates that reporter ions are being used. As such, we only analyze a single scan.', action='store_true')
quant_parameters.add_argument('--isotopologue-limit', help='How many isotopologues to quantify', type=int, default=-1)
quant_parameters.add_argument('--overlapping-labels', help='This declares the mz values of labels will overlap. It is useful for data such as neucode, but not needed for only SILAC labeling.', action='store_true')
quant_parameters.add_argument('--labels-needed', help='How many labels need to be detected to quantify a scan (ie if you have a 2 state experiment and set this to 2, it will only quantify scans where both occur.', default=1, type=int)
quant_parameters.add_argument('--merge-labels', help='Merge labels together to a single XIC.', action='store_true')
quant_parameters.add_argument('--min-scans', help='How many quantification scans are needed to quantify a scan.', default=1, type=int)
quant_parameters.add_argument('--min-resolution', help='The minimal resolving power of a scan to consider for quantification. Useful for skipping low-res scans', default=0, type=float)
quant_parameters.add_argument('--no-mass-accuracy-correction', help='Disables the mass accuracy correction.', action='store_true')
quant_parameters.add_argument('--no-contaminant-detection', help='Disables routine to check if an ion is a contaminant of a nearby peptide (checks if its a likely isotopologue).', action='store_true')
# --- Peak detection / curve fitting -----------------------------------------
peak_parameters = pyquant_parser.add_argument_group('Peak Fitting Parameters')
peak_parameters.add_argument('--peak-find-method', help='The method to use to identify peaks within data. For LC-MS, relative-max is usually best. For smooth data, derivative is better.', type=str, choices=(PEAK_FINDING_REL_MAX, PEAK_FINDING_DERIVATIVE), default=PEAK_FINDING_REL_MAX)
peak_parameters.add_argument(
    '--peak-find-mode',
    help='This picks some predefined parameters for various use cases. Fast is good for robust data with few peaks, slow is good for complex data with overlapping peaks of very different size.',
    type=str,
    choices=(PEAK_FIT_MODE_SLOW, PEAK_FIT_MODE_AVERAGE, PEAK_FIT_MODE_FAST),
    default=PEAK_FIT_MODE_AVERAGE
)
peak_parameters.add_argument('--gap-interpolation', help='This interpolates missing data in scans. The parameter should be a number that is the maximal gap size to fill (ie 2 means a gap of 2 scans). Can be useful for low intensity LC-MS data.', type=int, default=0)
peak_parameters.add_argument('--remove-baseline', help='Fit a separate line for the baseline of each peak.', action='store_true')
peak_parameters.add_argument('--peak-cutoff', help='The threshold from the initial retention time a peak can fall by before being discarded', type=float, default=0.05)
peak_parameters.add_argument('--max-peaks', help='The maximal number of peaks to detect per scan. A lower value can help with very noisy data.', type=int, default=-1)
peak_parameters.add_argument('--peaks-n', help='The number of peaks to report per scan. Useful for ions with multiple elution times.', type=int, default=1)
peak_parameters.add_argument('--no-rt-guide', help='Do not use the retention time to bias for peaks containing the MS trigger time.', action='store_true')
peak_parameters.add_argument('--snr-filter', help='Filter peaks below a given SNR.', type=float, default=0)
peak_parameters.add_argument('--zscore-filter', help='Peaks below a given z-score are excluded.', type=float, default=0)
peak_parameters.add_argument('--filter-width', help='The window size for snr/zscore filtering. Default: entire scan', type=float, default=0)
peak_parameters.add_argument('--r2-cutoff', help='The minimal R^2 for a peak to be kept. Should be a value between 0 and 1', type=float, default=None)
peak_parameters.add_argument('--intensity-filter', help='Filter peaks whose peak are below a given intensity.', type=float, default=0)
peak_parameters.add_argument('--percentile-filter', help='Filter peaks whose peak are below a given percentile of the data.', type=float, default=0)
peak_parameters.add_argument('--min-peak-separation', help='Peaks separated by less than this distance will be combined. For very crisp data, set this to a lower number. (minimal value is 1)', type=int, default=5)
peak_parameters.add_argument('--disable-peak-filtering', help='This will disable smoothing of data prior to peak finding. If you have very good LC, this may be used to identify small peaks.', action='store_true')
peak_parameters.add_argument('--merge-isotopes', help='Merge Isotopologues together prior to fitting.', action='store_true')
peak_parameters.add_argument('--peak-resolution-mode', help='The method to use to resolve peaks across multiple XICs', choices=(PEAK_RESOLUTION_RT_MODE, PEAK_RESOLUTION_COMMON_MODE), type=str, default='common-peak')
# --- Extracted ion chromatogram construction --------------------------------
xic_parameters = pyquant_parser.add_argument_group('XIC Options')
xic_parameters.add_argument('--xic-snr', help='When the SNR of the XIC falls below this, stop searching for more data. Useful for escaping from noisy shoulders and contaminants.', type=float, default=1.0)
xic_parameters.add_argument('--xic-missing-ion-count', help='This specifies how many consequtive scans an ion can be missing for until it is no longer considered.', type=int, default=1)
xic_parameters.add_argument('--xic-window-size', help='When the number of scans in a given direction from the initial datapoint of an XIC passes this, stop. Default is -1 (disabled). Useful for removing contaminants', type=int, default=-1)
xic_parameters.add_argument('--xic-smooth', help='Prior to fitting, smooth data with a Gaussian filter.', action='store_true')
# NOTE(review): the help text says this flag *exports* spectra, but
# action='store_false' makes the option default to True and the flag turn it
# off — confirm which behavior is intended before relying on this option.
xic_parameters.add_argument('--export-msn', help='This will export spectra of a given MSN that were used to provide the quantification.', action='store_false')
# --- SRM/MRM (hidden/experimental) ------------------------------------------
mrm_parameters = pyquant_parser.add_argument_group('SRM/MRM Parameters')
#'A file indicating light and heavy peptide pairs, and optionally the known elution time.'
mrm_parameters.add_argument('--mrm-map', help=argparse.SUPPRESS, type=argparse.FileType('r'))
# --- Result reporting -------------------------------------------------------
output_group = pyquant_parser.add_argument_group("Output Options")
output_group.add_argument('--debug', help="This will output debug information.", action='store_true')
output_group.add_argument('--html', help="Output a HTML table summary.", action='store_true')
output_group.add_argument('--resume', help="Will resume from the last run. Only works if not directing output to stdout.", action='store_true')
output_group.add_argument('--sample', help="How much of the data to sample. Enter as a decimal (ie 1.0 for everything, 0.1 for 10%%)", type=float, default=1.0)
output_group.add_argument('--disable-stats', help="Disable confidence statistics on data.", action='store_true')
output_group.add_argument('--no-ratios', help="Disable reporting of ratios in output.", action='store_true')
output_group.add_argument('-o', '--out', nargs='?', help='The prefix for the file output', type=str)
# Choice constants for --export-mode below.
PER_PEAK = 'per-peak'
PER_FILE = 'per-file'
PER_ID = 'per-id'
# --- Spectra export ---------------------------------------------------------
spectra_output = pyquant_parser.add_argument_group("Spectra Output Options")
spectra_output.add_argument('--export-mzml', help='Create an mzml file of spectra contained within each peak.', action='store_true')
spectra_output.add_argument('--export-mode', help='How to export the scans. per-peak: A mzML per peak identified. per-id: A mzML per ion identified (each row of the output gets an mzML). per-file: All scans matched per raw file.', type=str, default='per-peak', choices={PER_PEAK, PER_ID, PER_FILE})
# --- Convenience presets for common experiment types ------------------------
convenience_group = pyquant_parser.add_argument_group('Convenience Parameters')
convenience_group.add_argument('--neucode', help='This will select parameters specific for neucode. Note: You still must define a labeling scheme.', action='store_true')
convenience_group.add_argument('--isobaric-tags', help='This will select parameters specific for isobaric tag based labeling (TMT/iTRAQ).', action='store_true')
convenience_group.add_argument('--ms3', help='This will select parameters specific for ms3 based quantification.', action='store_true')
convenience_group.add_argument('--maxquant', help='This will select parameters specific for a MaxQuant evidence file.', action='store_true')
convenience_group.add_argument('--gcms', help='This will select parameters specific for ion identification and quantification in GCMS experiments.', action='store_true')
#'This will select parameters specific for Selective/Multiple Reaction Monitoring (SRM/MRM).'
convenience_group.add_argument('--mrm', help=argparse.SUPPRESS, action='store_true')
| {
"repo_name": "pandeylab/pyquant",
"path": "pyquant/__init__.py",
"copies": "1",
"size": "14839",
"license": "mit",
"hash": 2856051937511392000,
"line_mean": 98.5906040268,
"line_max": 298,
"alpha_frac": 0.7553743514,
"autogenerated": false,
"ratio": 3.527216543855479,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4782590895255479,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sqlite3 as lite
import os
from constants import DATA_FOLDER
from protos.objects import Listings, Followers, Following
from dht.node import Node
from binascii import unhexlify
from collections import Counter
class Database(object):
    """
    Opens (creating on first run) the node's SQLite database and ensures the
    on-disk folder layout under DATA_FOLDER exists. The chosen database path
    is published through the module-level global ``DATABASE``, which the other
    store classes in this module read when they connect.
    """
    # pylint: disable=W0601
    DATABASE = None

    def __init__(self, testnet=False, filepath=None):
        """
        Args:
            testnet: when True, use the testnet database file name.
            filepath: optional explicit path for the database file.
        """
        global DATABASE
        self.TESTNET = testnet
        if testnet:
            DATABASE = DATA_FOLDER + "OB-Testnet.db"
        else:
            DATABASE = DATA_FOLDER + "OB-Mainnet.db"
        self.DATABASE = DATABASE
        if filepath:
            # NOTE(review): this override happens *after* self.DATABASE was
            # captured above, so the instance attribute keeps the default
            # path even when `filepath` is supplied — confirm intended.
            DATABASE = filepath
        # Make sure the cache/store/purchase directory hierarchy exists.
        if not os.path.exists(DATA_FOLDER + "cache/"):
            os.makedirs(DATA_FOLDER + "cache/")
        if not os.path.exists(DATA_FOLDER + "store/listings/contracts/"):
            os.makedirs(DATA_FOLDER + "store/listings/contracts/")
        if not os.path.exists(DATA_FOLDER + "store/listings/in progress/"):
            os.makedirs(DATA_FOLDER + "store/listings/in progress/")
        if not os.path.exists(DATA_FOLDER + "store/listings/unfunded/"):
            os.makedirs(DATA_FOLDER + "store/listings/unfunded/")
        if not os.path.exists(DATA_FOLDER + "store/listings/trade receipts/"):
            os.makedirs(DATA_FOLDER + "store/listings/trade receipts/")
        if not os.path.exists(DATA_FOLDER + "store/media/"):
            os.makedirs(DATA_FOLDER + "store/media/")
        if not os.path.exists(DATA_FOLDER + "purchases/in progress/"):
            os.makedirs(DATA_FOLDER + "purchases/in progress/")
        if not os.path.exists(DATA_FOLDER + "purchases/unfunded/"):
            os.makedirs(DATA_FOLDER + "purchases/unfunded/")
        if not os.path.exists(DATA_FOLDER + "purchases/trade receipts/"):
            os.makedirs(DATA_FOLDER + "purchases/trade receipts/")
        # First run: build the schema.
        if not os.path.isfile(DATABASE):
            self.create_database()
        # Delete any leftover cache.pickle (presumably a legacy cache file —
        # its origin is not visible in this module).
        if os.path.exists(DATA_FOLDER + "cache.pickle"):
            os.remove(DATA_FOLDER + "cache.pickle")

    @staticmethod
    def create_database(filepath=None):
        """
        Create every table and index used by the store classes below and
        return the open connection. Uses the module-global ``DATABASE`` path
        when `filepath` is not given.
        """
        if filepath is None:
            db = lite.connect(DATABASE)
        else:
            db = lite.connect(filepath)
        cursor = db.cursor()
        # Schema version marker for future migrations.
        cursor.execute('''PRAGMA user_version = 0''')
        cursor.execute('''CREATE TABLE hashmap(hash TEXT PRIMARY KEY, filepath TEXT)''')
        cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB)''')
        cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
        cursor.execute('''CREATE TABLE keys(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
        cursor.execute('''CREATE TABLE followers(id INTEGER PRIMARY KEY, serializedFollowers BLOB)''')
        cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
        cursor.execute('''CREATE TABLE messages(guid TEXT, handle TEXT, signed_pubkey BLOB,
            encryption_pubkey BLOB, subject TEXT, message_type TEXT, message TEXT, timestamp INTEGER,
            avatar_hash BLOB, signature BLOB, outgoing INTEGER, read INTEGER)''')
        # Indexes back the frequent lookups in MessageStore (by guid / unread).
        cursor.execute('''CREATE INDEX index_messages_guid ON messages(guid);''')
        cursor.execute('''CREATE INDEX index_messages_read ON messages(read);''')
        cursor.execute('''CREATE TABLE notifications(id TEXT PRIMARY KEY, guid BLOB, handle TEXT, type TEXT,
            order_id TEXT, title TEXT, timestamp INTEGER, image_hash BLOB, read INTEGER)''')
        cursor.execute('''CREATE TABLE broadcasts(id TEXT PRIMARY KEY, guid BLOB, handle TEXT, message TEXT,
            timestamp INTEGER, avatar_hash BLOB)''')
        cursor.execute('''CREATE TABLE vendors(guid TEXT PRIMARY KEY, ip TEXT, port INTEGER, signedPubkey BLOB)''')
        cursor.execute('''CREATE TABLE moderators(guid TEXT PRIMARY KEY, signedPubkey BLOB, encryptionKey BLOB,
            encryptionSignature BLOB, bitcoinKey BLOB, bitcoinSignature BLOB, handle TEXT, name TEXT, description TEXT,
            avatar BLOB, fee FLOAT)''')
        cursor.execute('''CREATE TABLE purchases(id TEXT PRIMARY KEY, title TEXT, description TEXT,
            timestamp INTEGER, btc FLOAT, address TEXT, status INTEGER, outpoint BLOB, thumbnail BLOB, seller TEXT,
            proofSig BLOB, contract_type TEXT)''')
        cursor.execute('''CREATE TABLE sales(id TEXT PRIMARY KEY, title TEXT, description TEXT,
            timestamp INTEGER, btc REAL, address TEXT, status INTEGER, thumbnail BLOB, outpoint BLOB, buyer TEXT,
            paymentTX TEXT, contract_type TEXT)''')
        cursor.execute('''CREATE TABLE settings(id INTEGER PRIMARY KEY, refundAddress TEXT, currencyCode TEXT,
            country TEXT, language TEXT, timeZone TEXT, notifications INTEGER, shippingAddresses BLOB, blocked BLOB,
            libbitcoinServer TEXT, SSL INTEGER, seed TEXT, terms_conditions TEXT, refund_policy TEXT)''')
        db.commit()
        return db
class HashMap(object):
    """
    Backs the ``hashmap`` table: associates a content hash (the identifier
    sent over the wire in a query) with a human readable file path in local
    storage, so users can browse their store data on disk.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def insert(self, hash_value, filepath):
        """Create or overwrite the mapping for `hash_value`."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
                      VALUES (?,?)''', (hash_value, filepath))
        self.db.commit()

    def get_file(self, hash_value):
        """Return the stored path for `hash_value`, or None when unknown."""
        cur = self.db.cursor()
        cur.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
        row = cur.fetchone()
        return None if row is None else row[0]

    def get_all(self):
        """Return every (hash, filepath) row in the table."""
        cur = self.db.cursor()
        cur.execute('''SELECT * FROM hashmap ''')
        return cur.fetchall()

    def delete(self, hash_value):
        """Drop the mapping for a single hash."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
        self.db.commit()

    def delete_all(self):
        """Empty the table entirely."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM hashmap''')
        self.db.commit()
class ProfileStore(object):
    """
    Persists the user's profile as one serialized Profile protobuf blob in
    the ``profile`` table (always row id=1). Protobuf is used so custom
    repeated fields (e.g. SocialAccount) survive round-trips and the blob can
    be served over the wire without rebuilding it at startup. Interact with
    the profile through `market.profile`, not this class directly.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_proto(self, proto):
        """Store (or replace) the serialized profile blob."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo)
                      VALUES (?,?)''', (1, proto))
        self.db.commit()

    def get_proto(self):
        """Return the serialized profile blob, or None when unset."""
        cur = self.db.cursor()
        cur.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
        row = cur.fetchone()
        return None if row is None else row[0]
class ListingsStore(object):
    """
    Stores a serialized `Listings` protobuf object containing metadata for all
    the contracts hosted by this store. Served in response to a GET_LISTING
    query; must be updated each time a contract is created or removed.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def _write_proto(self, cursor, listings):
        # Persist the single serialized Listings row (the table always keeps
        # exactly one row with id=1).
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, listings.SerializeToString()))
        self.db.commit()

    def add_listing(self, proto):
        """
        Append `proto` to the stored listings. Will also update an existing
        listing if the contract hash is the same.
        """
        cursor = self.db.cursor()
        l = Listings()
        ser = self.get_proto()
        if ser is not None:
            l.ParseFromString(ser)
            # BUGFIX: iterate over a snapshot — removing from a repeated
            # field while iterating it skips the element after each removal.
            for listing in list(l.listing):
                if listing.contract_hash == proto.contract_hash:
                    l.listing.remove(listing)
        l.listing.extend([proto])
        self._write_proto(cursor, l)

    def delete_listing(self, hash_value):
        """Remove every listing whose contract hash equals `hash_value`."""
        cursor = self.db.cursor()
        ser = self.get_proto()
        if ser is None:
            return
        l = Listings()
        l.ParseFromString(ser)
        # BUGFIX: snapshot for the same reason as in add_listing.
        for listing in list(l.listing):
            if listing.contract_hash == hash_value:
                l.listing.remove(listing)
        self._write_proto(cursor, l)

    def delete_all_listings(self):
        """Drop the serialized listings row entirely."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM listings''')
        self.db.commit()

    def get_proto(self):
        """Return the serialized Listings blob, or None when absent."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
        ret = cursor.fetchone()
        if ret is None:
            return None
        return ret[0]
class KeyStore(object):
    """Stores the node's key pairs in the ``keys`` table, one row per key type."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_key(self, key_type, privkey, pubkey):
        """Insert or replace the key pair stored under `key_type`."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO keys(type, privkey, pubkey)
                      VALUES (?,?,?)''', (key_type, privkey, pubkey))
        self.db.commit()

    def get_key(self, key_type):
        """Return the (privkey, pubkey) tuple for `key_type`, or None."""
        cur = self.db.cursor()
        cur.execute('''SELECT privkey, pubkey FROM keys WHERE type=?''', (key_type,))
        row = cur.fetchone()
        return row if row else None

    def delete_all_keys(self):
        """Remove every stored key pair."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM keys''')
        self.db.commit()
class FollowData(object):
    """
    Persists the follow graph: who this node follows (``following`` table)
    and who follows it (``followers`` table). Each table keeps a single row
    (id=1) holding a serialized Following/Followers protobuf.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def follow(self, proto):
        """Add `proto` to the following list, replacing any entry with the same guid."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            # BUGFIX: iterate over a snapshot — removing from a repeated
            # field while iterating it skips the element after each removal.
            for user in list(f.users):
                if user.guid == proto.guid:
                    f.users.remove(user)
        f.users.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def unfollow(self, guid):
        """Remove every entry with `guid` from the following list."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            # BUGFIX: snapshot, as in follow().
            for user in list(f.users):
                if user.guid == guid:
                    f.users.remove(user)
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_following(self):
        """Return the serialized Following blob, or None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        if not ret:
            return None
        else:
            return ret[0][0]

    def is_following(self, guid):
        """Return True when `guid` appears in the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Add `proto` to the followers list, replacing any entry with the same guid."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            f.ParseFromString(ser)
            # BUGFIX: snapshot, as in follow().
            for follower in list(f.followers):
                if follower.guid == proto.guid:
                    f.followers.remove(follower)
        f.followers.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def delete_follower(self, guid):
        """Remove every entry with `guid` from the followers list."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            f.ParseFromString(ser)
            # BUGFIX: snapshot, as in follow().
            for follower in list(f.followers):
                if follower.guid == guid:
                    f.followers.remove(follower)
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_followers(self):
        """Return the serialized Followers blob, or None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowers FROM followers WHERE id=1''')
        proto = cursor.fetchone()
        if not proto:
            return None
        else:
            return proto[0]
class MessageStore(object):
    """
    Chat/message persistence backed by the ``messages`` table: one row per
    message, keyed informally by the peer's guid, with an unread flag and an
    outgoing/incoming marker.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_message(self, guid, handle, signed_pubkey, encryption_pubkey, subject,
                     message_type, message, timestamp, avatar_hash, signature, is_outgoing):
        """Insert one message row; new messages always start unread (read=0)."""
        direction = 1 if is_outgoing else 0
        cur = self.db.cursor()
        cur.execute('''INSERT INTO messages(guid, handle, signed_pubkey, encryption_pubkey, subject,
            message_type, message, timestamp, avatar_hash, signature, outgoing, read) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                    (guid, handle, signed_pubkey, encryption_pubkey, subject, message_type,
                     message, timestamp, avatar_hash, signature, direction, 0))
        self.db.commit()

    def get_messages(self, guid, message_type):
        """Return all rows for `guid` of the given `message_type`."""
        cur = self.db.cursor()
        cur.execute('''SELECT guid, handle, signed_pubkey, encryption_pubkey, subject, message_type, message,
            timestamp, avatar_hash, signature, outgoing, read FROM messages WHERE guid=? AND message_type=?''',
                    (guid, message_type))
        return cur.fetchall()

    def get_conversations(self):
        """
        Return one summary dict per guid that has CHAT history: latest
        message, its timestamp, avatar/encryption key (hex) and unread count.
        """
        cur = self.db.cursor()
        cur.execute('''SELECT DISTINCT guid FROM messages''')
        unread = self.get_unread()
        conversations = []
        for (guid,) in cur.fetchall():
            cur.execute('''SELECT avatar_hash, message, max(timestamp), encryption_pubkey FROM messages
                WHERE guid=? and message_type="CHAT"''', (guid,))
            row = cur.fetchone()
            if row is not None:
                # .encode("hex") is a Python 2 idiom (str-based blobs).
                conversations.append({"guid": guid,
                                      "avatar_hash": row[0].encode("hex"),
                                      "last_message": row[1],
                                      "timestamp": row[2],
                                      "encryption_key": row[3].encode("hex"),
                                      "unread": unread[guid] if guid in unread else 0})
        return conversations

    def get_unread(self):
        """Return a Counter mapping guid -> number of unread incoming messages."""
        cur = self.db.cursor()
        cur.execute('''SELECT guid FROM messages WHERE read=0 and outgoing=0''')
        return Counter(row[0] for row in cur.fetchall())

    def mark_as_read(self, guid):
        """Flag every message from `guid` as read."""
        cur = self.db.cursor()
        cur.execute('''UPDATE messages SET read=? WHERE guid=?;''', (1, guid))
        self.db.commit()

    def delete_message(self, guid):
        """Delete the CHAT history with `guid` (other message types are kept)."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM messages WHERE guid=? AND message_type="CHAT"''', (guid, ))
        self.db.commit()
class NotificationStore(object):
    """UI notification persistence backed by the ``notifications`` table."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_notification(self, notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash):
        """Insert one notification row; new notifications start unread (read=0)."""
        row = (notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash, 0)
        cur = self.db.cursor()
        cur.execute('''INSERT INTO notifications(id, guid, handle, type, order_id, title, timestamp,
            image_hash, read) VALUES (?,?,?,?,?,?,?,?,?)''', row)
        self.db.commit()

    def get_notifications(self):
        """Return every stored notification row."""
        cur = self.db.cursor()
        cur.execute('''SELECT id, guid, handle, type, order_id, title, timestamp, image_hash, read
            FROM notifications''')
        return cur.fetchall()

    def mark_as_read(self, notif_id):
        """Flag one notification as read."""
        cur = self.db.cursor()
        cur.execute('''UPDATE notifications SET read=? WHERE id=?;''', (1, notif_id))
        self.db.commit()

    def delete_notification(self, notif_id):
        """Remove one notification row by id."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM notifications WHERE id=?''', (notif_id,))
        self.db.commit()
class BroadcastStore(object):
    """Feed-style broadcast persistence backed by the ``broadcasts`` table."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_broadcast(self, broadcast_id, guid, handle, message, timestamp, avatar_hash):
        """Insert one broadcast row keyed by `broadcast_id`."""
        row = (broadcast_id, guid, handle, message, timestamp, avatar_hash)
        cur = self.db.cursor()
        cur.execute('''INSERT INTO broadcasts(id, guid, handle, message, timestamp, avatar_hash)
            VALUES (?,?,?,?,?,?)''', row)
        self.db.commit()

    def get_broadcasts(self):
        """Return every stored broadcast row."""
        cur = self.db.cursor()
        cur.execute('''SELECT id, guid, handle, message, timestamp, avatar_hash FROM broadcasts''')
        return cur.fetchall()

    def delete_broadcast(self, broadcast_id):
        """Remove one broadcast row by id."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM broadcasts WHERE id=?''', (broadcast_id,))
        self.db.commit()
class VendorStore(object):
    """Persists known vendor nodes in the ``vendors`` table (one row per guid)."""

    def __init__(self):
        # DATABASE is the module-level path configured by Database.__init__.
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_vendor(self, guid, ip, port, signed_pubkey):
        """Insert or replace the vendor row keyed by `guid` (hex string)."""
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO vendors(guid, ip, port, signedPubkey)
                          VALUES (?,?,?,?)''', (guid, ip, port, signed_pubkey))
        except Exception as e:
            # NOTE(review): broad except silently swallows all DB errors and
            # only prints. Python 2 `print` statement; `e.message` is a
            # deprecated attribute even on Python 2 — consider logging and
            # narrowing to lite.Error.
            print e.message
        self.db.commit()

    def get_vendors(self):
        """Return all stored vendors as dht ``Node`` objects (guid hex-decoded)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, ip, port, signedPubkey FROM vendors''')
        ret = cursor.fetchall()
        nodes = []
        for n in ret:
            # Node(id_bytes, ip, port, signed_pubkey, True) — the meaning of
            # the final boolean is defined in dht.node, not visible here.
            node = Node(unhexlify(n[0]), n[1], n[2], n[3], True)
            nodes.append(node)
        return nodes

    def delete_vendor(self, guid):
        """Remove the vendor row for `guid`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM vendors WHERE guid=?''', (guid,))
        self.db.commit()
class ModeratorStore(object):
    """Persists moderator profiles in the ``moderators`` table (one row per guid)."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_moderator(self, guid, signed_pubkey, encryption_key, encription_sig,
                       bitcoin_key, bicoin_sig, name, avatar_hash, fee, handle="", short_desc=""):
        """
        Insert or replace a moderator profile keyed by `guid`.

        NOTE(review): the parameter names `encription_sig` and `bicoin_sig`
        are misspelled, but they are part of the public interface (callers
        may pass them by keyword) and so are left unchanged here.
        """
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO moderators(guid, signedPubkey, encryptionKey,
                encryptionSignature, bitcoinKey, bitcoinSignature, handle, name, description, avatar, fee)
                VALUES (?,?,?,?,?,?,?,?,?,?,?)''', (guid, signed_pubkey, encryption_key, encription_sig, bitcoin_key,
                                                    bicoin_sig, handle, name, short_desc, avatar_hash, fee))
        except Exception as e:
            # NOTE(review): broad except silently swallows all DB errors;
            # Python 2 `print` and deprecated `e.message`.
            print e.message
        self.db.commit()

    def get_moderator(self, guid):
        """Return the full stored row for `guid`, or None when unknown."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, signedPubkey, encryptionKey, encryptionSignature, bitcoinKey,
            bitcoinSignature, handle, name, description, avatar, fee FROM moderators WHERE guid=?''', (guid,))
        ret = cursor.fetchall()
        if not ret:
            return None
        else:
            return ret[0]

    def delete_moderator(self, guid):
        """Remove the moderator row for `guid`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM moderators WHERE guid=?''', (guid,))
        self.db.commit()

    def clear_all(self):
        """Empty the moderators table."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM moderators''')
        self.db.commit()
class Purchases(object):
    """Order records for purchases made by this node (``purchases`` table)."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def new_purchase(self, order_id, title, description, timestamp, btc,
                     address, status, thumbnail, seller, proofSig, contract_type):
        """Insert or replace the purchase row keyed by `order_id`."""
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO purchases(id, title, description, timestamp, btc,
                address, status, thumbnail, seller, proofSig, contract_type) VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
                           (order_id, title, description, timestamp, btc, address,
                            status, thumbnail, seller, proofSig, contract_type))
        except Exception as e:
            # NOTE(review): broad except silently swallows all DB errors;
            # Python 2 `print` and deprecated `e.message`.
            print e.message
        self.db.commit()

    def get_purchase(self, order_id):
        """Return the stored row for `order_id` (proofSig is the last column), or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
            thumbnail, seller, contract_type, proofSig FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        if not ret:
            return None
        else:
            return ret[0]

    def delete_purchase(self, order_id):
        """Remove the purchase row for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM purchases WHERE id=?''', (order_id,))
        self.db.commit()

    def get_all(self):
        """Return summary columns for every purchase (no address/outpoint/proofSig)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, status,
            thumbnail, seller, contract_type FROM purchases ''')
        return cursor.fetchall()

    def get_unfunded(self):
        """Return the ids of purchases with status=0 (unfunded, per the method name)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id FROM purchases WHERE status=0''')
        return cursor.fetchall()

    def update_status(self, order_id, status):
        """Set the status column for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE purchases SET status=? WHERE id=?;''', (status, order_id))
        self.db.commit()

    def update_outpoint(self, order_id, outpoint):
        """Record the funding transaction outpoint for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE purchases SET outpoint=? WHERE id=?;''', (outpoint, order_id))
        self.db.commit()

    def get_outpoint(self, order_id):
        """Return the stored outpoint for `order_id`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT outpoint FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        if not ret:
            return None
        else:
            return ret[0]
class Sales(object):
    """Tracks orders received by this node (seller side), keyed by order id."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def new_sale(self, order_id, title, description, timestamp, btc,
                 address, status, thumbnail, buyer, contract_type):
        """Insert or replace a sale row; errors are printed and swallowed."""
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO sales(id, title, description, timestamp, btc, address,
                status, thumbnail, buyer, contract_type) VALUES (?,?,?,?,?,?,?,?,?,?)''',
                           (order_id, title, description, timestamp, btc, address, status,
                            thumbnail, buyer, contract_type))
        except Exception as e:
            # e.message is deprecated (PEP 352); print the exception itself.
            print(e)
        self.db.commit()

    def get_sale(self, order_id):
        """Return the sale row for `order_id`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
            thumbnail, buyer, contract_type FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0]

    def delete_sale(self, order_id):
        """Remove the sale row for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM sales WHERE id=?''', (order_id,))
        self.db.commit()

    def get_all(self):
        """Return all sale summary rows (empty list when none)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, status,
            thumbnail, buyer, contract_type FROM sales ''')
        return cursor.fetchall()

    def get_unfunded(self):
        """Return the ids of sales still awaiting payment (status 0)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id FROM sales WHERE status=0''')
        return cursor.fetchall()

    def update_status(self, order_id, status):
        """Set the status column for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE sales SET status=? WHERE id=?;''', (status, order_id))
        self.db.commit()

    def update_outpoint(self, order_id, outpoint):
        """Record the funding transaction outpoint for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE sales SET outpoint=? WHERE id=?;''', (outpoint, order_id))
        self.db.commit()

    def update_payment_tx(self, order_id, txid):
        """Record the payment transaction id for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE sales SET paymentTX=? WHERE id=?;''', (txid, order_id))
        self.db.commit()

    def get_outpoint(self, order_id):
        """Return the stored outpoint for `order_id`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT outpoint FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        if not ret:
            return None
        return ret[0]
class Settings(object):
    """Single-row (id=1) table of the user's configurable client settings."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def update(self, refundAddress, currencyCode, country, language, timeZone, notifications,
               shipping_addresses, blocked, libbitcoinServer, ssl, seed, terms_conditions, refund_policy):
        """Overwrite the whole settings row (INSERT OR REPLACE at id=1)."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO settings(id, refundAddress, currencyCode, country,
            language, timeZone, notifications, shippingAddresses, blocked, libbitcoinServer, ssl, seed,
            terms_conditions, refund_policy) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
                    (1, refundAddress, currencyCode, country, language, timeZone,
                     notifications, shipping_addresses, blocked,
                     libbitcoinServer, ssl, seed, terms_conditions,
                     refund_policy))
        self.db.commit()

    def get(self):
        """Return the settings row as a tuple, or None if never saved."""
        cur = self.db.cursor()
        cur.execute('''SELECT * FROM settings WHERE id=1''')
        rows = cur.fetchall()
        return rows[0] if rows else None
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "1",
"size": "28186",
"license": "mit",
"hash": -5354942035978958000,
"line_mean": 41.1315396114,
"line_max": 116,
"alpha_frac": 0.556340027,
"autogenerated": false,
"ratio": 4.37263419174682,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0017588282625884066,
"num_lines": 669
} |
__author__ = 'chris'
import sqlite3 as lite
import os
from constants import DATA_FOLDER
from protos.objects import Listings, Followers, Following
from dht.node import Node
class Database(object):
    """Selects the sqlite file path, creates the on-disk data folders on
    first run, and builds the schema when the database file is missing.

    The path is published through the module-level ``DATABASE`` global,
    which every *Store class below reads in its ``__init__`` — so a
    ``Database`` instance must be created before any store object.
    """
    # pylint: disable=W0601
    DATABASE = None  # class-level placeholder; the module global is what the stores use

    def __init__(self, testnet=False, filepath=None):
        # The store classes connect via the module global, so set it here
        # before anything else instantiates them.
        global DATABASE
        self.TESTNET = testnet
        if testnet:
            DATABASE = DATA_FOLDER + "OB-Testnet.db"
        else:
            DATABASE = DATA_FOLDER + "OB-Mainnet.db"
        # NOTE(review): self.DATABASE is captured BEFORE the filepath
        # override below, so it keeps the default path even when a custom
        # filepath is supplied — this looks unintentional; confirm before
        # relying on self.DATABASE.
        self.DATABASE = DATABASE
        if filepath:
            DATABASE = filepath
        if not os.path.exists(DATA_FOLDER):
            # First run: lay out the on-disk data directories used by the
            # store/listing/purchase code.
            os.makedirs(DATA_FOLDER + "cache/")
            os.makedirs(DATA_FOLDER + "store/listings/contracts/")
            os.makedirs(DATA_FOLDER + "store/listings/in progress/")
            os.makedirs(DATA_FOLDER + "store/listings/trade receipts/")
            os.makedirs(DATA_FOLDER + "store/media/")
            os.makedirs(DATA_FOLDER + "purchases/in progress/")
            os.makedirs(DATA_FOLDER + "purchases/trade receipts/")
        if not os.path.isfile(DATABASE):
            self.create_database()

    @staticmethod
    def create_database(filepath=None):
        """Create every table and index of the schema; returns the open
        connection. Uses the module-level DATABASE path unless `filepath`
        is given."""
        if filepath is None:
            db = lite.connect(DATABASE)
        else:
            db = lite.connect(filepath)
        cursor = db.cursor()
        cursor.execute('''CREATE TABLE hashmap(hash BLOB PRIMARY KEY, filepath TEXT)''')
        cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB)''')
        cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
        cursor.execute('''CREATE TABLE keys(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
        cursor.execute('''CREATE TABLE followers(id INTEGER PRIMARY KEY, serializedFollowers BLOB)''')
        cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
        # NOTE(review): "timestamp, INTEGER" below contains a stray comma:
        # it creates a typeless `timestamp` column plus an extra column
        # literally named INTEGER. Inserts name their columns, so this
        # still works, but it is probably not the intended schema.
        cursor.execute('''CREATE TABLE messages(guid BLOB , handle TEXT, signed_pubkey BLOB,
            encryption_pubkey BLOB, subject TEXT, message_type TEXT, message TEXT, timestamp, INTEGER,
            avatar_hash BLOB, signature BLOB, outgoing INTEGER)''')
        cursor.execute('''CREATE TABLE notifications(guid BLOB, handle TEXT, message TEXT,
            timestamp INTEGER, avatar_hash BLOB)''')
        cursor.execute('''CREATE TABLE vendors(guid BLOB UNIQUE, ip TEXT, port INTEGER, signedPubkey BLOB)''')
        cursor.execute('''CREATE INDEX idx1 ON vendors(guid);''')
        cursor.execute('''CREATE TABLE moderators(guid BLOB UNIQUE, signedPubkey BLOB, encryptionKey BLOB,
            encryptionSignature BLOB, bitcoinKey BLOB, bitcoinSignature BLOB, handle TEXT)''')
        cursor.execute('''CREATE INDEX idx2 ON moderators(guid);''')
        cursor.execute('''CREATE TABLE purchases(id BLOB UNIQUE, title TEXT, timestamp INTEGER, btc FLOAT,
            address TEXT, status INTEGER, thumbnail BLOB, seller TEXT, proofSig BLOB)''')
        cursor.execute('''CREATE INDEX idx3 ON purchases(id);''')
        cursor.execute('''CREATE TABLE sales(id BLOB UNIQUE, title TEXT, timestamp INTEGER, btc REAL,
            address TEXT, status INTEGER, thumbnail BLOB, seller TEXT)''')
        cursor.execute('''CREATE INDEX idx4 ON sales(id);''')
        db.commit()
        return db
class HashMap(object):
    """Maps content hashes (sent over the wire in queries) to the
    human-readable file paths that hold the data locally, so users can
    browse their store data on disk.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def insert(self, hash_value, filepath):
        """Add or overwrite the mapping for `hash_value`."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
                      VALUES (?,?)''', (hash_value, filepath))
        self.db.commit()

    def get_file(self, hash_value):
        """Return the stored filepath for `hash_value`, or None."""
        cur = self.db.cursor()
        cur.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
        row = cur.fetchone()
        return None if row is None else row[0]

    def get_all(self):
        """Return every (hash, filepath) row."""
        cur = self.db.cursor()
        cur.execute('''SELECT * FROM hashmap ''')
        return cur.fetchall()

    def delete(self, hash_value):
        """Remove the mapping for `hash_value`, if present."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
        self.db.commit()

    def delete_all(self):
        """Drop every mapping."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM hashmap''')
        self.db.commit()
class ProfileStore(object):
    """Persists the user's profile as one serialized Profile protobuf row.

    Protobuf is used (rather than columns) so custom repeated fields like
    SocialAccount are easy to store, and the blob can be served over the
    wire as-is. Interact with profiles through `market.profile`, not this
    class directly.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_proto(self, proto):
        """Store the serialized profile, replacing any previous one."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo)
                      VALUES (?,?)''', (1, proto))
        self.db.commit()

    def get_proto(self):
        """Return the serialized profile blob, or None if unset."""
        cur = self.db.cursor()
        cur.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
        row = cur.fetchone()
        return None if row is None else row[0]
class ListingsStore(object):
    """Stores one serialized `Listings` protobuf containing metadata for
    every contract hosted by this store; served in response to
    GET_LISTINGS queries and updated whenever a contract changes.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def add_listing(self, proto):
        """Append `proto` to the serialized listings; an existing entry
        with the same contract_hash is replaced."""
        cursor = self.db.cursor()
        l = Listings()
        ser = self.get_proto()
        if ser is not None:
            l.ParseFromString(ser)
            # Iterate a snapshot: removing from a repeated field while
            # iterating it can skip the element after the removed one.
            for listing in list(l.listing):
                if listing.contract_hash == proto.contract_hash:
                    l.listing.remove(listing)
        l.listing.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, l.SerializeToString()))
        self.db.commit()

    def delete_listing(self, hash_value):
        """Remove the listing whose contract_hash equals `hash_value`;
        no-op when nothing is stored yet."""
        cursor = self.db.cursor()
        ser = self.get_proto()
        if ser is None:
            return
        l = Listings()
        l.ParseFromString(ser)
        # Snapshot for the same reason as in add_listing.
        for listing in list(l.listing):
            if listing.contract_hash == hash_value:
                l.listing.remove(listing)
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, l.SerializeToString()))
        self.db.commit()

    def delete_all_listings(self):
        """Drop the serialized listings row entirely."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM listings''')
        self.db.commit()

    def get_proto(self):
        """Return the serialized Listings blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
        ret = cursor.fetchone()
        if ret is None:
            return None
        return ret[0]
class KeyStore(object):
    """Holds the node's key pairs, one row per key type."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_key(self, key_type, privkey, pubkey):
        """Insert or replace the key pair stored under `key_type`."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO keys(type, privkey, pubkey)
                      VALUES (?,?,?)''', (key_type, privkey, pubkey))
        self.db.commit()

    def get_key(self, key_type):
        """Return the (privkey, pubkey) tuple for `key_type`, or None."""
        cur = self.db.cursor()
        cur.execute('''SELECT privkey, pubkey FROM keys WHERE type=?''', (key_type,))
        row = cur.fetchone()
        return row if row else None

    def delete_all_keys(self):
        """Wipe every stored key pair."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM keys''')
        self.db.commit()
class FollowData(object):
    """Tracks who this node follows and who follows it. Each list is one
    serialized protobuf row (`Following` / `Followers`)."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def follow(self, proto):
        """Add `proto` to the following list, replacing any existing
        entry with the same guid."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            # Iterate a snapshot: removing from a repeated field while
            # iterating it can skip elements.
            for user in list(f.users):
                if user.guid == proto.guid:
                    f.users.remove(user)
        f.users.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def unfollow(self, guid):
        """Remove `guid` from the following list (no-op if absent)."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in list(f.users):
                if user.guid == guid:
                    f.users.remove(user)
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_following(self):
        """Return the serialized Following blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0][0]

    def is_following(self, guid):
        """True when `guid` is in the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Add `proto` to the followers list, replacing any existing
        entry with the same guid."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            # BUG FIX: the stored blob was fetched but never parsed, so
            # every new follower silently wiped the persisted list down
            # to a single entry. Parse it before merging.
            f.ParseFromString(ser)
            for follower in list(f.followers):
                if follower.guid == proto.guid:
                    f.followers.remove(follower)
        f.followers.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def delete_follower(self, guid):
        """Remove `guid` from the followers list (no-op if absent)."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            f.ParseFromString(ser)
            for follower in list(f.followers):
                if follower.guid == guid:
                    f.followers.remove(follower)
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_followers(self):
        """Return the serialized Followers blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowers FROM followers WHERE id=1''')
        proto = cursor.fetchone()
        if not proto:
            return None
        return proto[0]
class MessageStore(object):
    """Stores messages exchanged with other nodes, keyed by their guid."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_message(self, guid, handle, signed_pubkey, encryption_pubkey, subject,
                     message_type, message, timestamp, avatar_hash, signature, is_outgoing):
        """Append one message row; `is_outgoing` is stored as 0/1."""
        direction = 1 if is_outgoing else 0
        cur = self.db.cursor()
        cur.execute('''INSERT INTO messages(guid, handle, signed_pubkey, encryption_pubkey, subject,
            message_type, message, timestamp, avatar_hash, signature, outgoing) VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
                    (guid, handle, signed_pubkey, encryption_pubkey, subject, message_type,
                     message, timestamp, avatar_hash, signature, direction))
        self.db.commit()

    def get_messages(self, guid, message_type):
        """Return all rows for (guid, message_type), or None when empty."""
        cur = self.db.cursor()
        cur.execute('''SELECT guid, handle, signed_pubkey, encryption_pubkey, subject, message_type, message,
            timestamp, avatar_hash, signature, outgoing FROM messages WHERE guid=? AND message_type=?''',
                    (guid, message_type))
        rows = cur.fetchall()
        return rows if rows else None

    def delete_message(self, guid):
        """Delete the CHAT history with `guid`; other message types are kept."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM messages WHERE guid=? AND message_type="CHAT"''', (guid, ))
        self.db.commit()
class NotificationStore(object):
    """Stores UI notifications so they survive restarts."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_notification(self, guid, handle, message, timestamp, avatar_hash):
        """Append a notification row."""
        cur = self.db.cursor()
        cur.execute('''INSERT INTO notifications(guid, handle, message, timestamp, avatar_hash)
                      VALUES (?,?,?,?,?)''', (guid, handle, message, timestamp, avatar_hash))
        self.db.commit()

    def get_notifications(self):
        """Return every stored notification row, or None when empty."""
        cur = self.db.cursor()
        cur.execute('''SELECT guid, handle, message, timestamp, avatar_hash FROM notifications''')
        rows = cur.fetchall()
        return rows if rows else None

    def delete_notfication(self, guid, timestamp):
        """Delete the notification matching (guid, timestamp).

        (Name keeps the original's spelling for caller compatibility.)
        """
        cur = self.db.cursor()
        cur.execute('''DELETE FROM notifications WHERE guid=? AND timestamp=?''', (guid, timestamp))
        self.db.commit()
class VendorStore(object):
    """Caches vendors discovered on the network so they can be recontacted
    without a fresh DHT lookup."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_vendor(self, guid, ip, port, signed_pubkey):
        """Insert or update the vendor row for `guid`.

        Errors are printed and swallowed (best-effort cache).
        """
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO vendors(guid, ip, port, signedPubkey)
                          VALUES (?,?,?,?)''', (guid, ip, port, signed_pubkey))
        except Exception as e:
            # e.message is deprecated (PEP 352) and missing on Python 3
            # exceptions; print the exception itself.
            print(e)
        self.db.commit()

    def get_vendors(self):
        """Return every cached vendor as a dht Node (vendor flag set)."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, ip, port, signedPubkey FROM vendors''')
        ret = cursor.fetchall()
        nodes = []
        for n in ret:
            node = Node(n[0], n[1], n[2], n[3], True)
            nodes.append(node)
        return nodes

    def delete_vendor(self, guid):
        """Drop the vendor row for `guid`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM vendors WHERE guid=?''', (guid,))
        self.db.commit()
class ModeratorStore(object):
    """Caches moderator keys and signatures, one row per guid."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_moderator(self, guid, signed_pubkey, encryption_key, encription_sig,
                       bitcoin_key, bicoin_sig, handle=""):
        """Insert or update the moderator row for `guid`.

        Errors are printed and swallowed (best-effort cache).
        """
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO moderators(guid, signedPubkey, encryptionKey,
                encryptionSignature, bitcoinKey, bitcoinSignature, handle) VALUES (?,?,?,?,?,?,?)''',
                           (guid, signed_pubkey, encryption_key, encription_sig,
                            bitcoin_key, bicoin_sig, handle))
        except Exception as e:
            # e.message is deprecated (PEP 352); print the exception itself.
            print(e)
        self.db.commit()

    def get_moderator(self, guid):
        """Return the moderator row for `guid`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, signedPubkey, encryptionKey, encryptionSignature, bitcoinKey,
            bitcoinSignature, handle FROM moderators WHERE guid=?''', (guid,))
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0]

    def delete_moderator(self, guid):
        """Remove the moderator row for `guid`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM moderators WHERE guid=?''', (guid,))
        self.db.commit()

    def clear_all(self):
        """Delete every cached moderator."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM moderators''')
        self.db.commit()
class Purchases(object):
    """Tracks orders placed by this node (buyer side), keyed by order id."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def new_purchase(self, order_id, title, timestamp, btc,
                     address, status, thumbnail, seller, proofSig):
        """Insert or replace a purchase row; errors are printed and swallowed."""
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO purchases(id, title, timestamp, btc, address, status,
                thumbnail, seller, proofSig) VALUES (?,?,?,?,?,?,?,?,?)''',
                           (order_id, title, timestamp, btc, address, status, thumbnail, seller, proofSig))
        except Exception as e:
            # e.message is deprecated (PEP 352); print the exception itself.
            print(e)
        self.db.commit()

    def get_purchase(self, order_id):
        """Return the purchase row for `order_id`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, timestamp, btc, address, status,
            thumbnail, seller, proofSig FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0]

    def delete_purchase(self, order_id):
        """Remove the purchase row for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM purchases WHERE id=?''', (order_id,))
        self.db.commit()

    def get_all(self):
        """Return every purchase row, or None when the table is empty."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, timestamp, btc, address, status,
            thumbnail, seller, proofSig FROM purchases ''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret

    def update_status(self, order_id, status):
        """Set the status column for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE purchases SET status=? WHERE id=?;''', (status, order_id))
        self.db.commit()
class Sales(object):
    """Tracks orders received by this node (seller side), keyed by order id."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def new_sale(self, order_id, title, timestamp, btc,
                 address, status, thumbnail, seller):
        """Insert or replace a sale row; errors are printed and swallowed."""
        cursor = self.db.cursor()
        try:
            cursor.execute('''INSERT OR REPLACE INTO sales(id, title, timestamp, btc, address, status,
                thumbnail, seller) VALUES (?,?,?,?,?,?,?,?)''',
                           (order_id, title, timestamp, btc, address, status, thumbnail, seller))
        except Exception as e:
            # e.message is deprecated (PEP 352); print the exception itself.
            print(e)
        self.db.commit()

    def get_sale(self, order_id):
        """Return the sale row for `order_id`, or None."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, timestamp, btc, address, status,
            thumbnail, seller FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0]

    def delete_sale(self, order_id):
        """Remove the sale row for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM sales WHERE id=?''', (order_id,))
        self.db.commit()

    def get_all(self):
        """Return every sale row, or None when the table is empty."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT id, title, timestamp, btc, address, status,
            thumbnail, seller FROM sales ''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret

    def update_status(self, order_id, status):
        """Set the status column for `order_id`."""
        cursor = self.db.cursor()
        cursor.execute('''UPDATE sales SET status=? WHERE id=?;''', (status, order_id))
        self.db.commit()
| {
"repo_name": "Joaz/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "3",
"size": "20943",
"license": "mit",
"hash": -4037693305700211000,
"line_mean": 39.6660194175,
"line_max": 116,
"alpha_frac": 0.5461490713,
"autogenerated": false,
"ratio": 4.457854406130268,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0035793547909875327,
"num_lines": 515
} |
__author__ = 'chris'
import sqlite3 as lite
from constants import DATABASE
from protos.objects import Listings, Followers, Following
from dht.node import Node
def create_database(filepath=None):
    """Create the full sqlite schema and return the open connection.

    Connects to the module-level DATABASE path unless `filepath` is given.
    """
    if filepath is None:  # identity test is the idiomatic None check
        db = lite.connect(DATABASE)
    else:
        db = lite.connect(filepath)
    cursor = db.cursor()
    cursor.execute('''CREATE TABLE hashmap(hash BLOB PRIMARY KEY, filepath TEXT)''')
    cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB)''')
    cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
    cursor.execute('''CREATE TABLE keystore(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
    cursor.execute('''CREATE TABLE followers(id INTEGER PRIMARY KEY, serializedFollowers BLOB)''')
    cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
    # Fixed: the original wrote "timestamp, INTEGER" (stray comma), which
    # created a typeless `timestamp` column plus a spurious column named
    # INTEGER. Inserts name their columns, so dropping the extra column is
    # backward compatible.
    cursor.execute('''CREATE TABLE messages(guid BLOB , handle TEXT, signed_pubkey BLOB,
        encryption_pubkey BLOB, subject TEXT, message_type TEXT, message TEXT, timestamp INTEGER,
        avatar_hash BLOB, signature BLOB, outgoing INTEGER)''')
    cursor.execute('''CREATE TABLE notifications(guid BLOB, handle TEXT, message TEXT,
        timestamp INTEGER, avatar_hash BLOB)''')
    cursor.execute('''CREATE TABLE vendors(guid BLOB UNIQUE, ip TEXT, port INTEGER, signedPubkey BLOB)''')
    cursor.execute('''CREATE INDEX idx1 ON vendors(guid);''')
    db.commit()
    return db
class HashMap(object):
    """Maps the content hashes used in wire queries to the human-readable
    file paths that hold the data locally, letting users browse their
    store data on disk.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def insert(self, hash_value, filepath):
        """Record (or overwrite) the path stored under `hash_value`."""
        self.db.cursor().execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
                      VALUES (?,?)''', (hash_value, filepath))
        self.db.commit()

    def get_file(self, hash_value):
        """Look up the filepath for `hash_value`; None when unknown."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
        row = cursor.fetchone()
        if row is None:
            return None
        return row[0]

    def get_all(self):
        """Return all (hash, filepath) rows."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT * FROM hashmap ''')
        return cursor.fetchall()

    def delete(self, hash_value):
        """Remove one mapping."""
        self.db.cursor().execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
        self.db.commit()

    def delete_all(self):
        """Remove every mapping."""
        self.db.cursor().execute('''DELETE FROM hashmap''')
        self.db.commit()
class ProfileStore(object):
    """Keeps the user's profile as a single serialized Profile protobuf.

    Storing the serialized blob (instead of columns) keeps repeated fields
    like SocialAccount flexible and lets the blob be served over the wire
    without rebuilding. Use `market.profile` to interact with profiles.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_proto(self, proto):
        """Replace the stored serialized profile."""
        self.db.cursor().execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo)
                      VALUES (?,?)''', (1, proto))
        self.db.commit()

    def get_proto(self):
        """Fetch the serialized profile blob; None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
        row = cursor.fetchone()
        if row is None:
            return None
        return row[0]
class ListingsStore(object):
    """Stores one serialized `Listings` protobuf with metadata for every
    contract this store hosts; served in response to GET_LISTING queries
    and refreshed whenever a contract is created or removed.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def add_listing(self, proto):
        """Append `proto`, replacing any entry with the same contract_hash."""
        cursor = self.db.cursor()
        l = Listings()
        ser = self.get_proto()
        if ser is not None:
            l.ParseFromString(ser)
            # Iterate a snapshot: removing from a repeated field while
            # iterating it can skip the element after the removed one.
            for listing in list(l.listing):
                if listing.contract_hash == proto.contract_hash:
                    l.listing.remove(listing)
        l.listing.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, l.SerializeToString()))
        self.db.commit()

    def delete_listing(self, hash_value):
        """Remove the listing whose contract_hash equals `hash_value`;
        no-op when nothing is stored yet."""
        cursor = self.db.cursor()
        ser = self.get_proto()
        if ser is None:
            return
        l = Listings()
        l.ParseFromString(ser)
        # Snapshot for the same reason as in add_listing.
        for listing in list(l.listing):
            if listing.contract_hash == hash_value:
                l.listing.remove(listing)
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, l.SerializeToString()))
        self.db.commit()

    def delete_all_listings(self):
        """Drop the serialized listings row entirely."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM listings''')
        self.db.commit()

    def get_proto(self):
        """Return the serialized Listings blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
        ret = cursor.fetchone()
        if ret is None:
            return None
        return ret[0]
class KeyStore(object):
    """Persists the node's key pairs, one row per key type, in the
    `keystore` table."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_key(self, key_type, privkey, pubkey):
        """Store (or replace) the pair filed under `key_type`."""
        self.db.cursor().execute('''INSERT OR REPLACE INTO keystore(type, privkey, pubkey)
                      VALUES (?,?,?)''', (key_type, privkey, pubkey))
        self.db.commit()

    def get_key(self, key_type):
        """Fetch the (privkey, pubkey) tuple for `key_type`; None if absent."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT privkey, pubkey FROM keystore WHERE type=?''', (key_type,))
        row = cursor.fetchone()
        if not row:
            return None
        return row

    def delete_all_keys(self):
        """Remove every stored key pair."""
        self.db.cursor().execute('''DELETE FROM keystore''')
        self.db.commit()
class FollowData(object):
    """Tracks who this node follows and who follows it; each list is one
    serialized protobuf row (`Following` / `Followers`)."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def follow(self, proto):
        """Add `proto` to the following list, replacing any existing
        entry with the same guid."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            # Iterate a snapshot: removing from a repeated field while
            # iterating it can skip elements.
            for user in list(f.users):
                if user.guid == proto.guid:
                    f.users.remove(user)
        f.users.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def unfollow(self, guid):
        """Remove `guid` from the following list (no-op if absent)."""
        cursor = self.db.cursor()
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in list(f.users):
                if user.guid == guid:
                    f.users.remove(user)
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_following(self):
        """Return the serialized Following blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0][0]

    def is_following(self, guid):
        """True when `guid` is in the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Add `proto` to the followers list, replacing any existing
        entry with the same guid."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            # BUG FIX: the stored blob was fetched but never parsed, so
            # each new follower silently wiped the persisted list down to
            # a single entry. Parse it before merging.
            f.ParseFromString(ser)
            for follower in list(f.followers):
                if follower.guid == proto.guid:
                    f.followers.remove(follower)
        f.followers.extend([proto])
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def delete_follower(self, guid):
        """Remove `guid` from the followers list (no-op if absent)."""
        cursor = self.db.cursor()
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            f.ParseFromString(ser)
            for follower in list(f.followers):
                if follower.guid == guid:
                    f.followers.remove(follower)
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_followers(self):
        """Return the serialized Followers blob, or None if unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowers FROM followers WHERE id=1''')
        proto = cursor.fetchone()
        if not proto:
            return None
        return proto[0]
class MessageStore(object):
    """Persists messages exchanged with other nodes, keyed by guid."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_message(self, guid, handle, signed_pubkey, encryption_pubkey, subject,
                     message_type, message, timestamp, avatar_hash, signature, is_outgoing):
        """Append a message row; `is_outgoing` becomes the 0/1 `outgoing` column."""
        flag = 1 if is_outgoing else 0
        self.db.cursor().execute('''INSERT INTO messages(guid, handle, signed_pubkey, encryption_pubkey, subject,
            message_type, message, timestamp, avatar_hash, signature, outgoing) VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
                                 (guid, handle, signed_pubkey, encryption_pubkey, subject, message_type,
                                  message, timestamp, avatar_hash, signature, flag))
        self.db.commit()

    def get_messages(self, guid, message_type):
        """Fetch all rows for (guid, message_type); None when there are none."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, handle, signed_pubkey, encryption_pubkey, subject, message_type, message,
            timestamp, avatar_hash, signature, outgoing FROM messages WHERE guid=? AND message_type=?''',
                       (guid, message_type))
        rows = cursor.fetchall()
        if not rows:
            return None
        return rows

    def delete_message(self, guid):
        """Erase the CHAT history with `guid`; other message types remain."""
        self.db.cursor().execute('''DELETE FROM messages WHERE guid=? AND message_type="CHAT"''', (guid, ))
        self.db.commit()
class NotificationStore(object):
    """Persists UI notifications across restarts."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_notification(self, guid, handle, message, timestamp, avatar_hash):
        """Append one notification row."""
        self.db.cursor().execute('''INSERT INTO notifications(guid, handle, message, timestamp, avatar_hash)
                      VALUES (?,?,?,?,?)''', (guid, handle, message, timestamp, avatar_hash))
        self.db.commit()

    def get_notifications(self):
        """Fetch every notification row; None when there are none."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, handle, message, timestamp, avatar_hash FROM notifications''')
        rows = cursor.fetchall()
        if not rows:
            return None
        return rows

    def delete_notfication(self, guid, timestamp):
        """Delete the row matching (guid, timestamp).

        (Name keeps the original's spelling for caller compatibility.)
        """
        self.db.cursor().execute('''DELETE FROM notifications WHERE guid=? AND timestamp=?''', (guid, timestamp))
        self.db.commit()
class VendorStore(object):
    """Caches vendors discovered on the network so they can be recontacted
    without a fresh DHT lookup."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_vendor(self, guid, ip, port, signed_pubkey):
        """Insert or update the vendor row keyed by `guid`."""
        cur = self.db.cursor()
        cur.execute('''INSERT OR REPLACE INTO vendors(guid, ip, port, signedPubkey)
                      VALUES (?,?,?,?)''', (guid, ip, port, signed_pubkey))
        self.db.commit()

    def get_vendors(self):
        """Return every cached vendor as a dht Node (vendor flag set)."""
        cur = self.db.cursor()
        cur.execute('''SELECT guid, ip, port, signedPubkey FROM vendors''')
        return [Node(guid, ip, port, pubkey, True)
                for guid, ip, port, pubkey in cur.fetchall()]

    def delete_vendor(self, guid):
        """Drop the vendor row for `guid`."""
        cur = self.db.cursor()
        cur.execute('''DELETE FROM vendors WHERE guid=?''', (guid,))
        self.db.commit()
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "1",
"size": "12810",
"license": "mit",
"hash": 5390940786583150000,
"line_mean": 35.7048710602,
"line_max": 115,
"alpha_frac": 0.5954722873,
"autogenerated": false,
"ratio": 4.164499349804942,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002176429534836157,
"num_lines": 349
} |
__author__ = 'chris'
import sqlite3 as lite
from constants import DATABASE
from protos.objects import Listings, Followers, Following
def create_database(filepath=None):
    """Create the application's sqlite schema.

    Args:
        filepath: optional path for the database file; defaults to the
            configured DATABASE location when None.

    Returns:
        The open sqlite3 connection (handy for tests / in-memory databases;
        previous callers that ignored the return value are unaffected).
    """
    db = lite.connect(DATABASE if filepath is None else filepath)
    cursor = db.cursor()
    cursor.execute('''CREATE TABLE hashmap(hash BLOB PRIMARY KEY, filepath TEXT)''')
    cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB)''')
    cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
    cursor.execute('''CREATE TABLE keystore(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
    cursor.execute('''CREATE TABLE followers(id INTEGER PRIMARY KEY, serializedFollowers BLOB)''')
    cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
    # BUG FIX: the original read "timestamp, INTEGER," which declared an
    # untyped `timestamp` column plus a spurious column named `INTEGER`.
    cursor.execute('''CREATE TABLE messages(guid BLOB, handle TEXT, signed_pubkey BLOB,
                   encryption_pubkey BLOB, subject TEXT, message_type TEXT, message TEXT, timestamp INTEGER,
                   avatar_hash BLOB, signature BLOB)''')
    cursor.execute('''CREATE TABLE notifications(guid BLOB, handle TEXT, message TEXT,
                   timestamp INTEGER, avatar_hash BLOB)''')
    db.commit()
    return db
class HashMap(object):
    """
    Maps file hashes (which are sent over the wire in a query) to a more
    human readable filename in local storage, so users can look through
    their store data on disk.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def insert(self, hash_value, filepath):
        """Add or overwrite the mapping for `hash_value`."""
        c = self.db.cursor()
        c.execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
                      VALUES (?,?)''', (hash_value, filepath))
        self.db.commit()

    def get_file(self, hash_value):
        """Return the file path mapped to `hash_value`, or None when unmapped."""
        c = self.db.cursor()
        c.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
        row = c.fetchone()
        return None if row is None else row[0]

    def get_all(self):
        """Return every (hash, filepath) pair in the table."""
        c = self.db.cursor()
        c.execute('''SELECT * FROM hashmap ''')
        return c.fetchall()

    def delete(self, hash_value):
        """Drop the mapping for a single hash."""
        self.db.cursor().execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
        self.db.commit()

    def delete_all(self):
        """Empty the table entirely."""
        self.db.cursor().execute('''DELETE FROM hashmap''')
        self.db.commit()
class ProfileStore(object):
    """
    Stores the user's profile as a single serialized `Profile` protobuf row.
    Protobuf is used because it is flexible (it allows custom repeated
    fields such as SocialAccount) and the raw bytes are served over the
    wire unchanged, so nothing has to be rebuilt at startup. Interact with
    the profile through `market.profile`, not this class directly.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_proto(self, proto):
        """Store `proto` (serialized bytes) in the singleton profile row."""
        c = self.db.cursor()
        c.execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo)
                      VALUES (?,?)''', (1, proto))
        self.db.commit()

    def get_proto(self):
        """Return the serialized profile bytes, or None if never set."""
        c = self.db.cursor()
        c.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
        row = c.fetchone()
        return None if row is None else row[0]
class ListingsStore(object):
    """
    Stores a serialized `Listings` protobuf containing metadata for all the
    contracts hosted by this store. Served in response to a GET_LISTING
    query; must be updated each time a contract is created or removed.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def add_listing(self, proto):
        """
        Append a listing. Will also update an existing listing if the
        contract hash is the same.
        """
        l = Listings()
        ser = self.get_proto()
        if ser is not None:
            l.ParseFromString(ser)
            # BUG FIX: iterate a snapshot — removing from a protobuf
            # repeated field while iterating it can skip elements.
            for listing in list(l.listing):
                if listing.contract_hash == proto.contract_hash:
                    l.listing.remove(listing)
        l.listing.extend([proto])
        self._persist(l)

    def delete_listing(self, hash_value):
        """Remove the listing whose contract hash equals `hash_value`."""
        ser = self.get_proto()
        if ser is None:
            return
        l = Listings()
        l.ParseFromString(ser)
        for listing in list(l.listing):  # snapshot; see add_listing
            if listing.contract_hash == hash_value:
                l.listing.remove(listing)
        self._persist(l)

    def _persist(self, listings):
        """Serialize `listings` into the singleton row and commit."""
        cursor = self.db.cursor()
        cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, listings.SerializeToString()))
        self.db.commit()

    def delete_all_listings(self):
        """Drop every listing."""
        self.db.cursor().execute('''DELETE FROM listings''')
        self.db.commit()

    def get_proto(self):
        """Return the serialized `Listings` bytes, or None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
        ret = cursor.fetchone()
        if ret is None:
            return None
        return ret[0]
class KeyStore(object):
    """Holds the node's key pairs, one row per key type."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def set_key(self, key_type, privkey, pubkey):
        """Insert or overwrite the key pair stored under `key_type`."""
        c = self.db.cursor()
        c.execute('''INSERT OR REPLACE INTO keystore(type, privkey, pubkey)
                      VALUES (?,?,?)''', (key_type, privkey, pubkey))
        self.db.commit()

    def get_key(self, key_type):
        """Return the (privkey, pubkey) tuple for `key_type`, or None."""
        c = self.db.cursor()
        c.execute('''SELECT privkey, pubkey FROM keystore WHERE type=?''', (key_type,))
        row = c.fetchone()
        return row if row else None

    def delete_all_keys(self):
        """Wipe every stored key pair."""
        self.db.cursor().execute('''DELETE FROM keystore''')
        self.db.commit()
class FollowData(object):
    """
    Tracks who this node follows (`following` table) and who follows it
    (`followers` table), each kept as one serialized protobuf row.
    """

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def follow(self, proto):
        """Add `proto` to the following list, deduping by guid."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            # Iterate a snapshot: removing from a repeated field while
            # iterating it can skip elements.
            for user in list(f.users):
                if user.guid == proto.guid:
                    f.users.remove(user)
        f.users.extend([proto])
        self._save_following(f)

    def unfollow(self, guid):
        """Remove any entry with `guid` from the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in list(f.users):  # snapshot; see follow()
                if user.guid == guid:
                    f.users.remove(user)
        self._save_following(f)

    def _save_following(self, f):
        """Serialize the `Following` proto into its singleton row."""
        cursor = self.db.cursor()
        cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_following(self):
        """Return the serialized `Following` bytes, or None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret[0][0]

    def is_following(self, guid):
        """True when `guid` is in the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Add `proto` to the followers list, deduping by guid."""
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            # BUG FIX: the original fetched `ser` but never parsed it,
            # so every call discarded all previously stored followers.
            f.ParseFromString(ser)
            for follower in list(f.followers):  # snapshot; see follow()
                if follower.guid == proto.guid:
                    f.followers.remove(follower)
        f.followers.extend([proto])
        self._save_followers(f)

    def delete_follower(self, guid):
        """Remove any entry with `guid` from the followers list."""
        f = Followers()
        ser = self.get_followers()
        if ser is not None:
            f.ParseFromString(ser)
            for follower in list(f.followers):  # snapshot; see follow()
                if follower.guid == guid:
                    f.followers.remove(follower)
        self._save_followers(f)

    def _save_followers(self, f):
        """Serialize the `Followers` proto into its singleton row."""
        cursor = self.db.cursor()
        cursor.execute('''INSERT OR REPLACE INTO followers(id, serializedFollowers) VALUES (?,?)''',
                       (1, f.SerializeToString()))
        self.db.commit()

    def get_followers(self):
        """Return the serialized `Followers` bytes, or None when unset."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT serializedFollowers FROM followers WHERE id=1''')
        proto = cursor.fetchone()
        if not proto:
            return None
        return proto[0]
class MessageStore(object):
    """Persists incoming/outgoing messages in the `messages` table."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_message(self, guid, handle, signed_pubkey, encryption_pubkey,
                     subject, message_type, message, timestamp, avatar_hash, signature):
        """Insert one message row."""
        row = (guid, handle, signed_pubkey, encryption_pubkey, subject,
               message_type, message, timestamp, avatar_hash, signature)
        c = self.db.cursor()
        c.execute('''INSERT INTO messages(guid, handle, signed_pubkey, encryption_pubkey, subject,
                   message_type, message, timestamp, avatar_hash, signature) VALUES (?,?,?,?,?,?,?,?,?,?)''',
                  row)
        self.db.commit()

    def get_messages(self, guid, message_type):
        """Return all rows matching (guid, message_type), or None when empty."""
        c = self.db.cursor()
        c.execute('''SELECT guid, handle, signed_pubkey, encryption_pubkey, subject, message_type, message,
                   timestamp, avatar_hash, signature FROM messages WHERE guid=? AND message_type=?''',
                  (guid, message_type))
        rows = c.fetchall()
        return rows if rows else None

    def delete_message(self, guid):
        """Delete only the CHAT messages for `guid` (other types are kept)."""
        self.db.cursor().execute(
            '''DELETE FROM messages WHERE guid=? AND message_type="CHAT"''', (guid, ))
        self.db.commit()
class NotificationStore(object):
    """Persistence layer for user notifications (`notifications` table)."""

    def __init__(self):
        self.db = lite.connect(DATABASE)
        self.db.text_factory = str

    def save_notification(self, guid, handle, message, timestamp, avatar_hash):
        """Insert a single notification row."""
        cursor = self.db.cursor()
        cursor.execute('''INSERT INTO notifications(guid, handle, message, timestamp, avatar_hash)
                      VALUES (?,?,?,?,?)''', (guid, handle, message, timestamp, avatar_hash))
        self.db.commit()

    def get_notifications(self):
        """Return every notification row, or None when the table is empty."""
        cursor = self.db.cursor()
        cursor.execute('''SELECT guid, handle, message, timestamp, avatar_hash FROM notifications''')
        ret = cursor.fetchall()
        if not ret:
            return None
        return ret

    def delete_notification(self, guid, timestamp):
        """Delete the notification identified by (guid, timestamp)."""
        cursor = self.db.cursor()
        cursor.execute('''DELETE FROM notifications WHERE guid=? AND timestamp=?''', (guid, timestamp))
        self.db.commit()

    # Backward-compatible alias: the original public name was misspelled.
    delete_notfication = delete_notification
| {
"repo_name": "eXcomm/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "2",
"size": "11621",
"license": "mit",
"hash": -673905914379458700,
"line_mean": 35.5440251572,
"line_max": 112,
"alpha_frac": 0.5945271491,
"autogenerated": false,
"ratio": 4.2319737800437,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0021772606102766724,
"num_lines": 318
} |
__author__ = 'chris'
import stun
import os
import sys
import dht.constants
from db.datastore import create_database
from twisted.internet import reactor
from twisted.python import log, logfile
from twisted.web.server import Site
from twisted.web.static import File
from keyutils.keys import KeyChain
from dht.network import Server
from dht.node import Node
from wireprotocol import OpenBazaarProtocol
from constants import DATA_FOLDER, DATABASE
from market import network
from market.listeners import MessageListenerImpl, NotificationListenerImpl
from ws import WSFactory, WSProtocol
from autobahn.twisted.websocket import listenWS
from restapi import OpenBazaarAPI
from dht.storage import PersistentStorage
# logging: mirror twisted log output to a rotating debug file and stdout
logFile = logfile.LogFile.fromFullPath(DATA_FOLDER + "debug.log")
log.addObserver(log.FileLogObserver(logFile).emit)
log.startLogging(sys.stdout)
# stun: discover this node's public (NAT type, ip, port) via Google's STUN server
print "Finding NAT Type.."
response = stun.get_ip_info(stun_host="stun.l.google.com", source_port=18467, stun_port=19302)
print "%s on %s:%s" % (response[0], response[1], response[2])
ip_address = response[1]
port = response[2]
# database: create the sqlite schema on first run only
if not os.path.isfile(DATABASE):
    create_database()
# key generation
keys = KeyChain()
def on_bootstrap_complete(resp):
    """Attach websocket message/notification listeners once the DHT
    bootstrap finishes. Relies on the module-level `ws_factory` and
    `mserver` being assigned before the bootstrap callback fires."""
    mlistener = MessageListenerImpl(ws_factory)
    mserver.get_messages(mlistener)
    mserver.protocol.add_listener(mlistener)
    nlistener = NotificationListenerImpl(ws_factory)
    mserver.protocol.add_listener(nlistener)
protocol = OpenBazaarProtocol((ip_address, port))
# kademlia: resume from a pickled routing table when available, otherwise
# bootstrap from the hard-coded seed node
node = Node(keys.guid, ip_address, port, signed_pubkey=keys.guid_signed_pubkey)
if os.path.isfile(DATA_FOLDER + 'cache.pickle'):
    kserver = Server.loadState(DATA_FOLDER + 'cache.pickle', ip_address, port, protocol,
                               on_bootstrap_complete, storage=PersistentStorage(DATABASE))
else:
    kserver = Server(node, dht.constants.KSIZE, dht.constants.ALPHA, storage=PersistentStorage(DATABASE))
    kserver.protocol.connect_multiplexer(protocol)
    kserver.bootstrap(
        kserver.querySeed("162.213.253.147:8080",
                          "5b56c8daeb3b37c8a9b47be6102fa43b9f069f58dcb57475984041b26c99e389"))\
        .addCallback(on_bootstrap_complete)
kserver.saveStateRegularly(DATA_FOLDER + 'cache.pickle', 10)
protocol.register_processor(kserver.protocol)
# market: the higher-level OpenBazaar protocol on top of the DHT
mserver = network.Server(kserver, keys.signing_key)
mserver.protocol.connect_multiplexer(protocol)
protocol.register_processor(mserver.protocol)
reactor.listenUDP(port, protocol)
# websockets api (local UI push channel)
ws_factory = WSFactory("ws://127.0.0.1:18466", mserver, kserver)
ws_factory.protocol = WSProtocol
ws_factory.setProtocolOptions(allowHixie76=True)
listenWS(ws_factory)
webdir = File(".")
web = Site(webdir)
reactor.listenTCP(9000, web, interface="127.0.0.1")
# rest api (local only; timeout disabled for long-lived requests)
api = OpenBazaarAPI(mserver, kserver, protocol)
site = Site(api, timeout=None)
reactor.listenTCP(18469, site, interface="127.0.0.1")
reactor.run()
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "openbazaard.py",
"copies": "1",
"size": "2949",
"license": "mit",
"hash": -6808049444128088000,
"line_mean": 31.7666666667,
"line_max": 105,
"alpha_frac": 0.7653441845,
"autogenerated": false,
"ratio": 3.2513781697905184,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4516722354290519,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys
import argparse
import json
import time
from twisted.internet import reactor
from txjsonrpc.netstring.jsonrpc import Proxy
from binascii import hexlify, unhexlify
from dht.utils import digest
from txjsonrpc.netstring import jsonrpc
from market.profile import Profile
from protos import objects, countries
from db.datastore import HashMap
from keyutils.keys import KeyChain
from market.contracts import Contract
from collections import OrderedDict
from interfaces import MessageListener
from zope.interface import implements
from dht.node import Node
def do_continue(value):
    """No-op deferred callback: swallow the result and keep the reactor running."""
    pass
def print_value(value):
    """Deferred success callback: pretty-print the JSON-RPC result and stop the reactor."""
    print json.dumps(value, indent=4)
    reactor.stop()
def print_error(error):
    """Deferred errback: report the failure and stop the reactor."""
    print 'error', error
    reactor.stop()
class Parser(object):
    """Command-line front end for the daemon's JSON-RPC interface.

    Dispatches ``networkcli.py <command> [...]`` to the static method of the
    same name; each method parses its own flags from ``sys.argv[2:]`` and
    issues a call via the module-level ``proxy``, printing the result with
    `print_value`/`print_error`.
    """
    def __init__(self, proxy_obj):
        parser = argparse.ArgumentParser(
            description='OpenBazaar Network CLI',
            usage='''
    python networkcli.py command [<arguments>]
commands:
    addsocialaccount     add a social media account to the profile
    addpgpkey            add a pgp key to the profile
    follow               follow a user
    unfollow             unfollow a user
    getinfo              returns an object containing various state info
    getpeers             returns the id of all the peers in the routing table
    get                  fetches the given keyword from the dht
    set                  sets the given keyword/key in the dht
    delete               deletes the keyword/key from the dht
    getnode              returns a node's ip address given its guid.
    getcontract          fetchs a contract from a node given its hash and guid
    getcontractmetadata  fetches the metadata (including thumbnail image) for the contract
    getimage             fetches an image from a node given its hash and guid
    getprofile           fetches the profile from the given node.
    getmoderators        fetches a list of moderators
    getusermetadata      fetches the metadata (shortened profile) for the node
    getlistings          fetches metadata about the store's listings
    getfollowers         fetches a list of followers of a node
    getfollowing         fetches a list of users a node is following
    getmessages          fetches messages from the dht
    sendnotification     sends a notification to all your followers
    setcontract          sets a contract in the filesystem and db
    setimage             maps an image hash to a filepath in the db
    setasmoderator       sets a node as a moderator
    setprofile           sets the given profile data in the database
    shutdown             closes all outstanding connections.
''')
        parser.add_argument('command', help='Execute the given command')
        args = parser.parse_args(sys.argv[1:2])
        if not hasattr(self, args.command):
            parser.print_help()
            exit(1)
        # NOTE(review): the command methods are static and use the
        # module-level `proxy`; self.proxy is only assigned after the
        # dispatched command has already run (and usually blocked on
        # reactor.run()), so the assignment below is effectively unused.
        getattr(self, args.command)()
        self.proxy = proxy_obj
    @staticmethod
    def get():
        """Fetch all DHT entries stored under a keyword."""
        parser = argparse.ArgumentParser(
            description="Fetch the given keyword from the dht and return all the entries",
            usage='''usage:
    networkcli.py get [-kw KEYWORD]''')
        parser.add_argument('-kw', '--keyword', required=True, help="the keyword to fetch")
        args = parser.parse_args(sys.argv[2:])
        keyword = args.keyword
        d = proxy.callRemote('get', keyword)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def set():
        """Store this node's serialized info under a keyword/key in the DHT."""
        parser = argparse.ArgumentParser(
            description='Set the given keyword/key pair in the dht. The value will be your '
                        'serialized node information.',
            usage='''usage:
    networkcli.py set [-kw KEYWORD] [-k KEY]''')
        parser.add_argument('-kw', '--keyword', required=True, help="the keyword to set in the dht")
        parser.add_argument('-k', '--key', required=True, help="the key to set at the keyword")
        args = parser.parse_args(sys.argv[2:])
        keyword = args.keyword
        key = args.key
        d = proxy.callRemote('set', keyword, key)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def delete():
        """Delete a keyword/key pair from the DHT (daemon signs the request)."""
        parser = argparse.ArgumentParser(
            description="Deletes the given keyword/key from the dht. Signature will be automatically generated.",
            usage='''usage:
    networkcli.py delete [-kw KEYWORD] [-k KEY]''')
        parser.add_argument('-kw', '--keyword', required=True, help="where to find the key")
        parser.add_argument('-k', '--key', required=True, help="the key to delete")
        args = parser.parse_args(sys.argv[2:])
        keyword = args.keyword
        key = args.key
        d = proxy.callRemote('delete', keyword, key)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getinfo():
        """Print daemon state info (peer count, stored messages, db size)."""
        parser = argparse.ArgumentParser(
            description="Returns an object containing various state info",
            usage='''usage:
    networkcli getinfo''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('getinfo')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def shutdown():
        """Ask the daemon to close all outstanding connections."""
        parser = argparse.ArgumentParser(
            description="Terminates all outstanding connections.",
            usage='''usage:
    networkcli shutdown''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('shutdown')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getpubkey():
        """Print this node's public key."""
        parser = argparse.ArgumentParser(
            description="Returns this node's public key.",
            usage='''usage:
    networkcli getpubkey''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('getpubkey')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getcontract():
        """Fetch a contract from a remote node by hash and guid."""
        parser = argparse.ArgumentParser(
            description="Fetch a contract given its hash and guid.",
            usage='''usage:
    networkcli.py getcontract [-c HASH] [-g GUID]''')
        parser.add_argument('-c', '--hash', required=True, help="the hash of the contract")
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        hash_value = args.hash
        guid = args.guid
        d = proxy.callRemote('getcontract', hash_value, guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getimage():
        """Fetch an image from a remote node by hash and guid."""
        parser = argparse.ArgumentParser(
            description="Fetch an image given its hash and guid.",
            usage='''usage:
    networkcli.py getcontract [-i HASH] [-g GUID]''')
        parser.add_argument('-i', '--hash', required=True, help="the hash of the image")
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        hash_value = args.hash
        guid = args.guid
        d = proxy.callRemote('getimage', hash_value, guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getpeers():
        """Print the ids of every peer in the routing table."""
        parser = argparse.ArgumentParser(
            description="Returns id of all peers in the routing table",
            usage='''usage:
    networkcli getpeers''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('getpeers')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getnode():
        """Resolve a guid to its ip address."""
        parser = argparse.ArgumentParser(
            description="Fetch the ip address for a node given its guid.",
            usage='''usage:
    networkcli.py getnode [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to find")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getnode', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def setprofile():
        """Write profile fields (name, country, handle, images) into the local db."""
        parser = argparse.ArgumentParser(
            description="Sets a profile in the database.",
            usage='''usage:
    networkcli.py setprofile [options]''')
        parser.add_argument('-n', '--name', help="the name of the user/store")
        parser.add_argument('-o', '--onename', help="the onename id")
        parser.add_argument('-a', '--avatar', help="the file path to the avatar image")
        parser.add_argument('-hd', '--header', help="the file path to the header image")
        parser.add_argument('-c', '--country',
                            help="a string consisting of country from protos.countries.CountryCode")
        # we could add all the fields here but this is good enough to test.
        args = parser.parse_args(sys.argv[2:])
        p = Profile()
        u = objects.Profile()
        h = HashMap()
        if args.name is not None:
            u.name = args.name
        if args.country is not None:
            u.location = countries.CountryCode.Value(args.country.upper())
        if args.onename is not None:
            u.handle = args.onename
        if args.avatar is not None:
            # hash the image and remember where it lives on disk
            with open(args.avatar, "r") as filename:
                image = filename.read()
                hash_value = digest(image)
                u.avatar_hash = hash_value
                h.insert(hash_value, args.avatar)
        if args.header is not None:
            with open(args.header, "r") as filename:
                image = filename.read()
                hash_value = digest(image)
                u.header_hash = hash_value
                h.insert(hash_value, args.header)
        u.encryption_key = KeyChain().encryption_pubkey
        p.update(u)
    @staticmethod
    def addpgpkey():
        """Attach a PGP key and ownership signature to the profile."""
        parser = argparse.ArgumentParser(
            description="Add a pgp key to the profile.",
            usage='''usage:
    networkcli.py addpgpkey -k KEY, -s SIGNATURE''')
        parser.add_argument('-k', '--key', help="path to the key file")
        parser.add_argument('-s', '--signature', help="path to the signature file")
        args = parser.parse_args(sys.argv[2:])
        with open(args.key, "r") as filename:
            key = filename.read()
        with open(args.signature, "r") as filename:
            sig = filename.read()
        p = Profile()
        print p.add_pgp_key(key, sig, KeyChain().guid.encode("hex"))
    @staticmethod
    def addsocialaccount():
        """Attach a social media account (type, username, proof url) to the profile."""
        parser = argparse.ArgumentParser(
            description="Add a social media account to the profile.",
            usage='''usage:
    networkcli.py addsocialaccout -t TYPE, -u USERNAME, -p PROOF''')
        parser.add_argument('-t', '--type', help="the type of account")
        parser.add_argument('-u', '--username', help="the username")
        parser.add_argument('-p', '--proof', help="the proof url")
        args = parser.parse_args(sys.argv[2:])
        p = Profile()
        p.add_social_account(args.type, args.username, args.proof)
    @staticmethod
    def getprofile():
        """Fetch a remote node's profile (images land in the cache)."""
        parser = argparse.ArgumentParser(
            description="Fetch the profile from the given node. Images will be saved in cache.",
            usage='''usage:
    networkcli.py getprofile [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getprofile', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getusermetadata():
        """Fetch a remote node's shortened profile (metadata)."""
        parser = argparse.ArgumentParser(
            description="Fetches the metadata (small profile) from"
                        "a given node. The images will be saved in cache.",
            usage='''usage:
    networkcli.py getusermetadata [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getusermetadata', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def setcontract():
        """Load a completed json contract from disk and persist it locally."""
        parser = argparse.ArgumentParser(
            description="Sets a new contract in the database and filesystem.",
            usage='''usage:
    networkcli.py setcontract [-f FILEPATH]''')
        parser.add_argument('-f', '--filepath', help="a path to a completed json contract")
        args = parser.parse_args(sys.argv[2:])
        # OrderedDict preserves the contract's field order for hashing/signing
        with open(args.filepath) as data_file:
            contract = json.load(data_file, object_pairs_hook=OrderedDict)
        Contract(contract).save()
    @staticmethod
    def setimage():
        """Hash an image file and map the hash to its path in the db."""
        parser = argparse.ArgumentParser(
            description="Maps a image hash to a file path in the database",
            usage='''usage:
    networkcli.py setimage [-f FILEPATH]''')
        parser.add_argument('-f', '--filepath', help="a path to the image")
        args = parser.parse_args(sys.argv[2:])
        with open(args.filepath, "r") as f:
            image = f.read()
        d = digest(image)
        h = HashMap()
        h.insert(d, args.filepath)
        print h.get_file(d)
    @staticmethod
    def getlistings():
        """Fetch metadata about a remote store's listings."""
        parser = argparse.ArgumentParser(
            description="Fetches metadata about the store's listings",
            usage='''usage:
    networkcli.py getmetadata [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getlistings', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getcontractmetadata():
        """Fetch a contract's metadata (thumbnails land in the cache)."""
        parser = argparse.ArgumentParser(
            description="Fetches the metadata for the given contract. The thumbnail images will be saved in cache.",
            usage='''usage:
    networkcli.py getcontractmetadata [-g GUID] [-c CONTRACT]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        parser.add_argument('-c', '--contract', required=True, help="the contract hash")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        contract = args.contract
        d = proxy.callRemote('getcontractmetadata', guid, contract)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def setasmoderator():
        """Mark this node as a moderator."""
        parser = argparse.ArgumentParser(
            description="Sets the given node as a moderator.",
            usage='''usage:
    networkcli.py setasmoderator''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('setasmoderator')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getmoderators():
        """Fetch the list of known moderators from the DHT."""
        parser = argparse.ArgumentParser(
            description="Fetches a list of moderators",
            usage='''usage:
    networkcli.py getmoderators ''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('getmoderators')
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def follow():
        """Follow the user with the given guid."""
        parser = argparse.ArgumentParser(
            description="Follow a user",
            usage='''usage:
    networkcli.py follow [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to follow")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('follow', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def unfollow():
        """Unfollow the user with the given guid."""
        parser = argparse.ArgumentParser(
            description="Unfollow a user",
            usage='''usage:
    networkcli.py unfollow [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to unfollow")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('unfollow', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getfollowers():
        """Fetch the followers list of a remote node."""
        parser = argparse.ArgumentParser(
            description="Get a list of followers of a node",
            usage='''usage:
    networkcli.py getfollowers [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getfollowers', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getfollowing():
        """Fetch the list of users a remote node is following."""
        parser = argparse.ArgumentParser(
            description="Get a list users a node is following",
            usage='''usage:
    networkcli.py getfollowing [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to query")
        args = parser.parse_args(sys.argv[2:])
        guid = args.guid
        d = proxy.callRemote('getfollowing', guid)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def sendnotification():
        """Broadcast a notification message to all followers."""
        parser = argparse.ArgumentParser(
            description="Send a notification to all your followers",
            usage='''usage:
    networkcli.py sendnotification [-m MESSAGE]''')
        parser.add_argument('-m', '--message', required=True, help="the message to send")
        args = parser.parse_args(sys.argv[2:])
        message = args.message
        d = proxy.callRemote('sendnotification', message)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def sendmessage():
        """Send a direct (optionally offline) message to another node."""
        parser = argparse.ArgumentParser(
            description="Send a message to another node",
            usage='''usage:
    networkcli.py sendmessage [-g GUID] [-p PUBKEY] [-m MESSAGE] [-o]''')
        parser.add_argument('-g', '--guid', required=True, help="the guid to send to")
        parser.add_argument('-p', '--pubkey', required=True, help="the encryption key of the node")
        parser.add_argument('-m', '--message', required=True, help="the message to send")
        parser.add_argument('-o', '--offline', action='store_true', help="sends to offline recipient")
        args = parser.parse_args(sys.argv[2:])
        message = args.message
        guid = args.guid
        pubkey = args.pubkey
        offline = args.offline
        d = proxy.callRemote('sendmessage', guid, pubkey, message, offline)
        d.addCallbacks(print_value, print_error)
        reactor.run()
    @staticmethod
    def getmessages():
        """Fetch stored messages from the DHT."""
        parser = argparse.ArgumentParser(
            description="Get messages from the dht",
            usage='''usage:
    networkcli.py getmessages''')
        parser.parse_args(sys.argv[2:])
        d = proxy.callRemote('getmessages')
        d.addCallbacks(print_value, print_error)
        reactor.run()
# RPC-Server
class RPCCalls(jsonrpc.JSONRPC):
def __init__(self, kserver, mserver, keys):
jsonrpc.JSONRPC.__init__(self)
self.kserver = kserver
self.mserver = mserver
self.keys = keys
def jsonrpc_getpubkey(self):
return hexlify(self.keys.guid_signed_pubkey)
def jsonrpc_getinfo(self):
info = {"version": "0.1"}
num_peers = 0
for bucket in self.kserver.protocol.router.buckets:
num_peers += bucket.__len__()
info["known peers"] = num_peers
info["stored messages"] = len(self.kserver.storage.data)
size = sys.getsizeof(self.kserver.storage.data)
size += sum(map(sys.getsizeof, self.kserver.storage.data.itervalues())) + sum(
map(sys.getsizeof, self.kserver.storage.data.iterkeys()))
info["db size"] = size
return info
def jsonrpc_set(self, keyword, key):
def handle_result(result):
print "JSONRPC result:", result
d = self.kserver.set(str(keyword), unhexlify(key), self.kserver.node.getProto().SerializeToString())
d.addCallback(handle_result)
return "Sending store request..."
def jsonrpc_get(self, keyword):
def handle_result(result):
print "JSONRPC result:", result
for mod in result:
try:
val = objects.Value()
val.ParseFromString(mod)
node = objects.Node()
node.ParseFromString(val.serializedData)
print node
except Exception as e:
print 'malformed protobuf', e.message
d = self.kserver.get(keyword)
d.addCallback(handle_result)
return "Sent get request. Check log output for result"
def jsonrpc_delete(self, keyword, key):
def handle_result(result):
print "JSONRPC result:", result
signature = self.keys.signing_key.sign(digest(key))
d = self.kserver.delete(str(keyword), digest(key), signature[:64])
d.addCallback(handle_result)
return "Sending delete request..."
def jsonrpc_shutdown(self):
for addr in self.kserver.protocol:
connection = self.kserver.protocol._active_connections.get(addr)
if connection is not None:
connection.shutdown()
return "Closing all connections."
def jsonrpc_getpeers(self):
peers = []
for bucket in self.kserver.protocol.router.buckets:
for node in bucket.getNodes():
peers.append(node.id.encode("hex"))
return peers
def jsonrpc_getnode(self, guid):
def print_node(node):
print node.ip, node.port
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(print_node)
return "finding node..."
def jsonrpc_getcontract(self, contract_hash, guid):
def get_node(node):
def print_resp(resp):
print resp
if node is not None:
d = self.mserver.get_contract(node, unhexlify(contract_hash))
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting contract..."
def jsonrpc_getimage(self, image_hash, guid):
def get_node(node):
def print_resp(resp):
print resp
if node is not None:
d = self.mserver.get_image(node, unhexlify(image_hash))
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting image..."
def jsonrpc_getprofile(self, guid):
start = time.time()
def get_node(node):
def print_resp(resp):
print time.time() - start
print resp
print hexlify(resp.encryption_key)
if node is not None:
d = self.mserver.get_profile(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting profile..."
def jsonrpc_getusermetadata(self, guid):
start = time.time()
def get_node(node):
def print_resp(resp):
print time.time() - start
print resp
if node is not None:
d = self.mserver.get_user_metadata(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting user metadata..."
def jsonrpc_getlistings(self, guid):
start = time.time()
def get_node(node):
def print_resp(resp):
print time.time() - start
if resp:
for l in resp.listing:
resp.listing.remove(l)
h = l.contract_hash
l.contract_hash = hexlify(h)
resp.listing.extend([l])
print resp
if node is not None:
d = self.mserver.get_listings(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting listing metadata..."
def jsonrpc_getcontractmetadata(self, guid, contract_hash):
start = time.time()
def get_node(node):
def print_resp(resp):
print time.time() - start
print resp
if node is not None:
d = self.mserver.get_contract_metadata(node, unhexlify(contract_hash))
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting contract metadata..."
def jsonrpc_setasmoderator(self):
    # Advertise this node as a moderator. Unlike the sibling handlers
    # this returns None, so the RPC caller receives no status string.
    self.mserver.make_moderator()
def jsonrpc_getmoderators(self):
def print_mods(mods):
print mods
self.mserver.get_moderators().addCallback(print_mods)
return "finding moderators in dht..."
def jsonrpc_follow(self, guid):
def get_node(node):
if node is not None:
def print_resp(resp):
print resp
d = self.mserver.follow(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "following node..."
def jsonrpc_unfollow(self, guid):
def get_node(node):
if node is not None:
def print_resp(resp):
print resp
d = self.mserver.unfollow(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "unfollowing node..."
def jsonrpc_getfollowers(self, guid):
def get_node(node):
if node is not None:
def print_resp(resp):
print resp
d = self.mserver.get_followers(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting followers..."
def jsonrpc_getfollowing(self, guid):
def get_node(node):
if node is not None:
def print_resp(resp):
print resp
d = self.mserver.get_following(node)
d.addCallback(print_resp)
d = self.kserver.resolve(unhexlify(guid))
d.addCallback(get_node)
return "getting following..."
def jsonrpc_sendnotification(self, message):
def get_count(count):
print "Notification reached %i follower(s)" % count
d = self.mserver.send_notification(message)
d.addCallback(get_count)
return "sendng notification..."
def jsonrpc_sendmessage(self, guid, pubkey, message, offline=False):
    """Send a chat message to a node; offline=True forces a stand-in node."""
    def deliver(node):
        # Skip delivery only when resolution failed and offline was not
        # explicitly requested.
        if node is None and offline is not True:
            return
        if offline is True:
            node = Node(unhexlify(guid), "127.0.0.1", 1234, digest("adsf"))
        self.mserver.send_message(node, pubkey, objects.Plaintext_Message.CHAT, message)
    self.kserver.resolve(unhexlify(guid)).addCallback(deliver)
    return "sending message..."
def jsonrpc_getmessages(self):
class GetMyMessages(object):
implements(MessageListener)
@staticmethod
def notify(sender_guid, encryption_pubkey, subject, message_type, message):
print message
self.mserer.get_messages(GetMyMessages())
return "getting messages..."
if __name__ == "__main__":
    # Connect to the locally running daemon's JSON-RPC port and hand
    # control to the command-line parser.
    proxy = Proxy('127.0.0.1', 18465)
    Parser(proxy)
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "networkcli.py",
"copies": "1",
"size": "27857",
"license": "mit",
"hash": -3815123287889230000,
"line_mean": 37.5297372061,
"line_max": 116,
"alpha_frac": 0.5947876656,
"autogenerated": false,
"ratio": 4.201025486351983,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5295813151951982,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys
import argparse
import json
from twisted.internet import reactor
from txjsonrpc.netstring.jsonrpc import Proxy
def do_continue(value):
    # No-op callback for deferreds whose result can be ignored.
    pass
def print_value(value):
    # Pretty-print a JSON-RPC result, then stop the reactor so the CLI exits.
    print json.dumps(value, indent=4)
    reactor.stop()
def print_error(error):
    # Print a JSON-RPC failure, then stop the reactor so the CLI exits.
    print 'error', error
    reactor.stop()
class Parser(object):
    """Parse the command line and dispatch JSON-RPC calls to a running node.

    The first positional argument names a sub-command; each sub-command
    parses its own flags, fires the matching remote call on the proxy and
    runs the reactor until a response (or an error) is printed.
    """
    def __init__(self, proxy):
        parser = argparse.ArgumentParser(
            description='OpenBazaar Network CLI',
            usage='''
    python network-cli.py command [<arguments>]
commands:
    getinfo          returns an object containing various state info
    getpeers         returns the id of all the peers in the routing table
    get              fetches the given keyword from the dht
    set              sets the given keyword/key in the dht
    delete           deletes the keyword/key from the dht
    getnode          returns a node's ip address given its guid.
    getcontract      fetchs a contract from a node given its hash and guid
    shutdown         closes all outstanding connections.
''')
        parser.add_argument('command', help='Execute the given command')
        args = parser.parse_args(sys.argv[1:2])
        # Assign the proxy *before* dispatching. The original assigned it
        # after getattr(...)() returned, so self.proxy was never visible
        # to the command methods.
        self.proxy = proxy
        if not hasattr(self, args.command):
            parser.print_help()
            exit(1)
        getattr(self, args.command)()

    def _call(self, method, *params):
        # Shared tail of every command: fire the remote call, attach the
        # printers, and spin the reactor until the deferred fires.
        d = self.proxy.callRemote(method, *params)
        d.addCallbacks(print_value, print_error)
        reactor.run()

    def get(self):
        """Fetch all entries stored under a DHT keyword."""
        parser = argparse.ArgumentParser(
            description="Fetch the given keyword from the dht and return all the entries",
            usage='''usage:
    network-cli.py get [-kw KEYWORD]''')
        parser.add_argument('-kw', '--keyword', required=True, help="the keyword to fetch")
        args = parser.parse_args(sys.argv[2:])
        self._call('get', args.keyword)

    def set(self):
        """Store a keyword/key pair in the DHT."""
        parser = argparse.ArgumentParser(
            description='Set the given keyword/key pair in the dht. The value will be your '
                        'serialized node information.',
            usage='''usage:
    network-cli.py set [-kw KEYWORD] [-k KEY]''')
        parser.add_argument('-kw', '--keyword', required=True, help="the keyword to set in the dht")
        parser.add_argument('-k', '--key', required=True, help="the key to set at the keyword")
        args = parser.parse_args(sys.argv[2:])
        self._call('set', args.keyword, args.key)

    def delete(self):
        """Delete a keyword/key entry from the DHT; the signature is auto-generated."""
        parser = argparse.ArgumentParser(
            description="Deletes the given keyword/key from the dht. Signature will be automatically generated.",
            usage='''usage:
    network-cli.py delete [-kw KEYWORD] [-k KEY]''')
        parser.add_argument('-kw', '--keyword', required=True, help="where to find the key")
        parser.add_argument('-k', '--key', required=True, help="the key to delete")
        args = parser.parse_args(sys.argv[2:])
        self._call('delete', args.keyword, args.key)

    def getinfo(self):
        """Print an object containing various node state info."""
        parser = argparse.ArgumentParser(
            description="Returns an object containing various state info",
            usage='''usage:
    network-cli getinfo''')
        parser.parse_args(sys.argv[2:])
        self._call('getinfo')

    def shutdown(self):
        """Close all outstanding connections on the running node."""
        parser = argparse.ArgumentParser(
            description="Terminates all outstanding connections.",
            usage='''usage:
    network-cli shutdown''')
        parser.parse_args(sys.argv[2:])
        self._call('shutdown')

    def getpubkey(self):
        """Print this node's public key."""
        parser = argparse.ArgumentParser(
            description="Returns this node's public key.",
            usage='''usage:
    network-cli getpubkey''')
        parser.parse_args(sys.argv[2:])
        self._call('getpubkey')

    def getcontract(self):
        """Fetch a contract given its hash and the holder's guid."""
        # add_help=False: '-h' is the contract hash flag here and would
        # otherwise collide with argparse's built-in help option, raising
        # ArgumentError before the command could run.
        parser = argparse.ArgumentParser(
            add_help=False,
            description="Fetch a contract given its hash and guid.",
            usage='''usage:
    network-cli.py getcontract [-h HASH] [-g GUID]''')
        parser.add_argument('-h', '--hash', required=True, help="the keyword to fetch")
        parser.add_argument('-g', '--guid', required=True, help="the keyword to fetch")
        args = parser.parse_args(sys.argv[2:])
        self._call('getcontract', args.hash, args.guid)

    def getpeers(self):
        """Print the ids of all peers in the routing table."""
        parser = argparse.ArgumentParser(
            description="Returns id of all peers in the routing table",
            usage='''usage:
    network-cli getpeers''')
        parser.parse_args(sys.argv[2:])
        self._call('getpeers')

    def getnode(self):
        """Print the ip address of a node given its guid."""
        parser = argparse.ArgumentParser(
            description="Fetch the ip address for a node given its guid.",
            usage='''usage:
    network-cli.py getnode [-g GUID]''')
        parser.add_argument('-g', '--guid', required=True, help="the keyword to fetch")
        args = parser.parse_args(sys.argv[2:])
        self._call('getnode', args.guid)
# Connect to the local daemon's JSON-RPC interface and run the CLI.
proxy = Proxy('127.0.0.1', 18465)
Parser(proxy)
"repo_name": "jorik041/Network",
"path": "network-cli.py",
"copies": "1",
"size": "5804",
"license": "mit",
"hash": 249355696187111900,
"line_mean": 35.974522293,
"line_max": 113,
"alpha_frac": 0.6088904204,
"autogenerated": false,
"ratio": 4.139800285306705,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5248690705706704,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys
import os
import pickle
import json
import random
from twisted.application import service, internet
from twisted.python.log import ILogObserver
from twisted.internet import task
from twisted.web import resource, server
from binascii import hexlify
from random import shuffle
import stun
import nacl.signing
import nacl.hash
import nacl.encoding
from seed import peers
from guidutils.guid import GUID
import log
from dht.node import Node
from dht.network import Server
from dht.crawling import NodeSpiderCrawl
from dht.utils import digest, deferredDict
from protos import objects
from wireprotocol import OpenBazaarProtocol
sys.path.append(os.path.dirname(__file__))

# Twisted application scaffolding; all log output goes to stdout at INFO.
application = service.Application("OpenBazaar_seed_server")
application.setComponent(ILogObserver, log.FileLogObserver(sys.stdout, log.INFO).emit)

# Load the keys, generating and pickling a GUID + signing key on first run.
# NOTE(review): pickle.load on a local data file; must never be pointed at
# untrusted input.
if os.path.isfile('keys.pickle'):
    keys = pickle.load(open("keys.pickle", "r"))
    g = keys["guid"]
    signing_key_hex = keys["signing_privkey"]
    signing_key = nacl.signing.SigningKey(signing_key_hex, encoder=nacl.encoding.HexEncoder)
else:
    print "Generating GUID, stand by..."
    g = GUID()
    signing_key = nacl.signing.SigningKey.generate()
    keys = {
        'guid': g,
        'signing_privkey': signing_key.encode(encoder=nacl.encoding.HexEncoder),
        'signing_pubkey': signing_key.verify_key.encode(encoder=nacl.encoding.HexEncoder)
    }
    pickle.dump(keys, open("keys.pickle", "wb"))

# Stun: discover this host's public IP address.
response = stun.get_ip_info(stun_host="stun.l.google.com", source_port=0, stun_port=19302)
ip_address = response[1]
port = 18467

# Start the kademlia server, restoring cached routing state if present.
this_node = Node(g.guid, ip_address, port, g.signed_pubkey)
protocol = OpenBazaarProtocol((ip_address, port))
if os.path.isfile('cache.pickle'):
    kserver = Server.loadState('cache.pickle', ip_address, port, protocol)
else:
    kserver = Server(this_node)
kserver.protocol.connect_multiplexer(protocol)
protocol.register_processor(kserver.protocol)
kserver.saveStateRegularly('cache.pickle', 10)
udpserver = internet.UDPServer(18467, protocol)
udpserver.setServiceParent(application)
class WebResource(resource.Resource):
def __init__(self, kserver):
resource.Resource.__init__(self)
self.kserver = kserver
self.nodes = {}
for bucket in self.kserver.protocol.router.buckets:
for node in bucket.getNodes():
self.nodes[node.id] = node
self.nodes[this_node.id] = this_node
loopingCall = task.LoopingCall(self.crawl)
loopingCall.start(60, True)
def crawl(self):
def gather_results(result):
for proto in result:
n = objects.Node()
try:
n.ParseFromString(proto)
node = Node(n.guid, n.ip, n.port, n.signedPublicKey, n.vendor)
if node.id not in self.nodes:
self.nodes[node.id] = node
except:
pass
def start_crawl(results):
for node, result in results.items():
if not result[0]:
del self.nodes[node.id]
node = Node(digest(random.getrandbits(255)))
nearest = self.kserver.protocol.router.findNeighbors(node)
spider = NodeSpiderCrawl(self.kserver.protocol, node, nearest, 100, 4)
d = spider.find().addCallback(gather_results)
ds = {}
for bucket in self.kserver.protocol.router.buckets:
for node in bucket.getNodes():
if node.id not in self.nodes:
self.nodes[node.id] = node
for node in self.nodes.values():
if node.id != this_node.id:
ds[node] = self.kserver.protocol.callPing(node)
deferredDict(ds).addCallback(start_crawl)
def getChild(self, child, request):
return self
def render_GET(self, request):
nodes = self.nodes.values()
shuffle(nodes)
log.msg("Received a request for nodes, responding...")
if "format" in request.args:
if request.args["format"][0] == "json":
json_list = []
if "type" in request.args and request.args["type"][0] == "vendors":
print "getting list of vendors"
for node in nodes:
if node.vendor is True:
print "found vendor"
node_dic = {}
node_dic["ip"] = node.ip
node_dic["port"] = node.port
json_list.append(node_dic)
sig = signing_key.sign(str(json_list))
resp = {"peers" : json_list, "signature" : hexlify(sig[:64])}
request.write(json.dumps(resp, indent=4))
else:
for node in nodes[:50]:
node_dic = {}
node_dic["ip"] = node.ip
node_dic["port"] = node.port
json_list.append(node_dic)
sig = signing_key.sign(str(json_list))
resp = {"peers" : json_list, "signature" : hexlify(sig[:64])}
request.write(json.dumps(resp, indent=4))
elif request.args["format"][0] == "protobuf":
proto = peers.PeerSeeds()
for node in nodes[:50]:
peer = peers.PeerData()
peer.ip_address = node.ip
peer.port = node.port
peer.vendor = node.vendor
proto.peer_data.append(peer.SerializeToString())
sig = signing_key.sign("".join(proto.peer_data))
proto.signature = sig
uncompressed_data = proto.SerializeToString()
request.write(uncompressed_data.encode("zlib"))
else:
proto = peers.PeerSeeds()
if "type" in request.args and request.args["type"][0] == "vendors":
for node in nodes:
if node.vendor is True:
peer = peers.PeerData()
peer.ip_address = node.ip
peer.port = node.port
peer.vendor = node.vendor
proto.peer_data.append(peer.SerializeToString())
sig = signing_key.sign("".join(proto.peer_data))
proto.signature = sig
uncompressed_data = proto.SerializeToString()
request.write(uncompressed_data.encode("zlib"))
else:
for node in nodes[:50]:
peer = peers.PeerData()
peer.ip_address = node.ip
peer.port = node.port
peer.vendor = node.vendor
proto.peer_data.append(peer.SerializeToString())
sig = signing_key.sign("".join(proto.peer_data))
proto.signature = sig
uncompressed_data = proto.SerializeToString()
request.write(uncompressed_data.encode("zlib"))
request.finish()
return server.NOT_DONE_YET
# Serve the peer list over HTTP on port 8080 as part of the application.
server_protocol = server.Site(WebResource(kserver))
seed_server = internet.TCPServer(8080, server_protocol)
seed_server.setServiceParent(application)
"repo_name": "jorik041/Network",
"path": "seed/httpseed.py",
"copies": "1",
"size": "7421",
"license": "mit",
"hash": -1683405836667646500,
"line_mean": 38.0631578947,
"line_max": 93,
"alpha_frac": 0.5721600862,
"autogenerated": false,
"ratio": 4.08870523415978,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.516086532035978,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys
import pickle
import json
import random
from binascii import hexlify
from random import shuffle
import os
from twisted.application import service, internet
from twisted.python.log import ILogObserver
from twisted.internet import task
from twisted.web import resource, server
import stun
import nacl.signing
import nacl.hash
import nacl.encoding
from seed import peers
import log
from dht.node import Node
from dht.network import Server
from dht.crawling import NodeSpiderCrawl
from dht.utils import digest, deferredDict
from protos import objects
from wireprotocol import OpenBazaarProtocol
from market import network
from keyutils.keys import KeyChain
sys.path.append(os.path.dirname(__file__))
application = service.Application("OpenBazaar_seed_server")
application.setComponent(ILogObserver, log.FileLogObserver(sys.stdout, log.INFO).emit)
# Load the keys
keychain = KeyChain()
if os.path.isfile('keys.pickle'):
keys = pickle.load(open("keys.pickle", "r"))
signing_key_hex = keys["signing_privkey"]
signing_key = nacl.signing.SigningKey(signing_key_hex, encoder=nacl.encoding.HexEncoder)
else:
signing_key = nacl.signing.SigningKey.generate()
keys = {
'signing_privkey': signing_key.encode(encoder=nacl.encoding.HexEncoder),
'signing_pubkey': signing_key.verify_key.encode(encoder=nacl.encoding.HexEncoder)
}
pickle.dump(keys, open("keys.pickle", "wb"))
# Stun
response = stun.get_ip_info(stun_host="stun.l.google.com", source_port=0, stun_port=19302)
ip_address = response[1]
port = 18467
# Start the kademlia server
this_node = Node(keychain.guid, ip_address, port, keychain.guid_signed_pubkey)
protocol = OpenBazaarProtocol((ip_address, port))
if os.path.isfile('cache.pickle'):
kserver = Server.loadState('cache.pickle', ip_address, port, protocol)
else:
kserver = Server(this_node)
kserver.protocol.connect_multiplexer(protocol)
protocol.register_processor(kserver.protocol)
kserver.saveStateRegularly('cache.pickle', 10)
# start the market server
mserver = network.Server(kserver, keychain.signing_key)
mserver.protocol.connect_multiplexer(protocol)
protocol.register_processor(mserver.protocol)
udpserver = internet.UDPServer(18467, protocol)
udpserver.setServiceParent(application)
class WebResource(resource.Resource):
    """HTTP resource that serves a signed list of known DHT peers.

    The peer cache is seeded from the routing table and refreshed once a
    minute by pinging every cached node and spider-crawling the DHT from
    a random point. GET requests return up to 50 peers (or all vendors
    when requested) as signed JSON or zlib-compressed protobuf.
    """
    def __init__(self, kserver_r):
        resource.Resource.__init__(self)
        self.kserver = kserver_r
        # Peer cache keyed by node id; always includes this node.
        self.nodes = {}
        for bucket in self.kserver.protocol.router.buckets:
            for node in bucket.getNodes():
                self.nodes[node.id] = node
        self.nodes[this_node.id] = this_node
        loopingCall = task.LoopingCall(self.crawl)
        loopingCall.start(60, True)
    def crawl(self):
        """Ping cached peers, drop dead ones, then crawl the DHT for new peers."""
        def gather_results(result):
            # Each result entry is a serialized objects.Node protobuf;
            # malformed entries are skipped.
            for proto in result:
                n = objects.Node()
                try:
                    n.ParseFromString(proto)
                    node = Node(n.guid, n.ip, n.port, n.signedPublicKey, n.vendor)
                    if node.id not in self.nodes:
                        self.nodes[node.id] = node
                except Exception:
                    pass
        def start_crawl(results):
            # Drop peers that failed the ping, then spider the DHT from
            # a random id to discover new peers.
            for node, result in results.items():
                if not result[0]:
                    del self.nodes[node.id]
            node = Node(digest(random.getrandbits(255)))
            nearest = self.kserver.protocol.router.findNeighbors(node)
            spider = NodeSpiderCrawl(self.kserver.protocol, node, nearest, 100, 4)
            spider.find().addCallback(gather_results)
        ds = {}
        for bucket in self.kserver.protocol.router.buckets:
            for node in bucket.getNodes():
                if node.id not in self.nodes:
                    self.nodes[node.id] = node
        for node in self.nodes.values():
            if node.id != this_node.id:
                ds[node] = self.kserver.protocol.callPing(node)
        deferredDict(ds).addCallback(start_crawl)
    def getChild(self, child, request):
        # Every URL path maps to this resource.
        return self
    def render_GET(self, request):
        # Serve up to 50 peers (or all vendors) in the requested format;
        # the default (no "format" arg) is zlib-compressed protobuf.
        nodes = self.nodes.values()
        shuffle(nodes)
        log.msg("Received a request for nodes, responding...")
        if "format" in request.args:
            if request.args["format"][0] == "json":
                json_list = []
                if "type" in request.args and request.args["type"][0] == "vendors":
                    print "getting list of vendors"
                    for node in nodes:
                        if node.vendor is True:
                            print "found vendor"
                            node_dic = {}
                            node_dic["ip"] = node.ip
                            node_dic["port"] = node.port
                            json_list.append(node_dic)
                    sig = signing_key.sign(str(json_list))
                    resp = {"peers": json_list, "signature": hexlify(sig[:64])}
                    request.write(json.dumps(resp, indent=4))
                else:
                    for node in nodes[:50]:
                        node_dic = {}
                        node_dic["ip"] = node.ip
                        node_dic["port"] = node.port
                        json_list.append(node_dic)
                    sig = signing_key.sign(str(json_list))
                    resp = {"peers": json_list, "signature": hexlify(sig[:64])}
                    request.write(json.dumps(resp, indent=4))
            elif request.args["format"][0] == "protobuf":
                proto = peers.PeerSeeds()
                for node in nodes[:50]:
                    peer = peers.PeerData()
                    peer.ip_address = node.ip
                    peer.port = node.port
                    peer.vendor = node.vendor
                    proto.peer_data.append(peer.SerializeToString())
                # sign() returns signature + message; keep only the
                # 64-byte detached signature.
                sig = signing_key.sign("".join(proto.peer_data))[:64]
                proto.signature = sig
                uncompressed_data = proto.SerializeToString()
                request.write(uncompressed_data.encode("zlib"))
        else:
            proto = peers.PeerSeeds()
            if "type" in request.args and request.args["type"][0] == "vendors":
                for node in nodes:
                    if node.vendor is True:
                        peer = peers.PeerData()
                        peer.ip_address = node.ip
                        peer.port = node.port
                        peer.vendor = node.vendor
                        proto.peer_data.append(peer.SerializeToString())
                sig = signing_key.sign("".join(proto.peer_data))[:64]
                proto.signature = sig
                uncompressed_data = proto.SerializeToString()
                request.write(uncompressed_data.encode("zlib"))
            else:
                for node in nodes[:50]:
                    peer = peers.PeerData()
                    peer.ip_address = node.ip
                    peer.port = node.port
                    peer.vendor = node.vendor
                    proto.peer_data.append(peer.SerializeToString())
                sig = signing_key.sign("".join(proto.peer_data))[:64]
                proto.signature = sig
                uncompressed_data = proto.SerializeToString()
                request.write(uncompressed_data.encode("zlib"))
        request.finish()
        return server.NOT_DONE_YET
# Serve the peer list over HTTP on port 8080 as part of the application.
server_protocol = server.Site(WebResource(kserver))
seed_server = internet.TCPServer(8080, server_protocol)
seed_server.setServiceParent(application)
| {
"repo_name": "eXcomm/OpenBazaar-Server",
"path": "seed/httpseed.py",
"copies": "4",
"size": "7575",
"license": "mit",
"hash": 7731996488541962000,
"line_mean": 38.0463917526,
"line_max": 92,
"alpha_frac": 0.5838943894,
"autogenerated": false,
"ratio": 4.066022544283414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6649916933683414,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys
import pickle
import json
import random
import argparse
import platform
from binascii import hexlify
from random import shuffle
import os
from twisted.internet import task, reactor
from twisted.web import resource, server
import stun
import nacl.signing
import nacl.hash
import nacl.encoding
from twisted.python import log, logfile
from daemon import Daemon
from seed import peers
from dht.node import Node
from dht.network import Server
from dht.crawling import NodeSpiderCrawl
from dht.utils import digest, deferredDict
from protos import objects
from net.wireprotocol import OpenBazaarProtocol
from market import network
from keyutils.keys import KeyChain
from db.datastore import Database
from constants import DATA_FOLDER
from log import Logger, FileLogObserver
def run(*args):
    """Start the seed server: database, logging, keys, DHT, market layer and
    the HTTP peer feed. Blocks in the reactor until shutdown.

    Args:
        args[0]: TESTNET flag; selects the testnet database and port.
    """
    TESTNET = args[0]
    # Create the database
    db = Database(testnet=TESTNET)
    # logging: rotate the debug log at ~15 MB, keep one old file, and
    # mirror everything to stdout.
    logFile = logfile.LogFile.fromFullPath(DATA_FOLDER + "debug.log", rotateLength=15000000, maxRotatedFiles=1)
    log.addObserver(FileLogObserver(logFile, level="debug").emit)
    log.addObserver(FileLogObserver(level="debug").emit)
    logger = Logger(system="Httpseed")
    # Load the keys, generating and pickling the signing key on first run.
    keychain = KeyChain(db)
    if os.path.isfile(DATA_FOLDER + 'keys.pickle'):
        keys = pickle.load(open(DATA_FOLDER + "keys.pickle", "r"))
        signing_key_hex = keys["signing_privkey"]
        signing_key = nacl.signing.SigningKey(signing_key_hex, encoder=nacl.encoding.HexEncoder)
    else:
        signing_key = nacl.signing.SigningKey.generate()
        keys = {
            'signing_privkey': signing_key.encode(encoder=nacl.encoding.HexEncoder),
            'signing_pubkey': signing_key.verify_key.encode(encoder=nacl.encoding.HexEncoder)
        }
        pickle.dump(keys, open(DATA_FOLDER + "keys.pickle", "wb"))
    # Stun: discover our public ip/port and NAT type.
    port = 18467 if not TESTNET else 28467
    logger.info("Finding NAT Type...")
    response = stun.get_ip_info(stun_host="stun.l.google.com", source_port=port, stun_port=19302)
    logger.info("%s on %s:%s" % (response[0], response[1], response[2]))
    ip_address = response[1]
    port = response[2]
    # Start the kademlia server, restoring cached routing state if present.
    this_node = Node(keychain.guid, ip_address, port, keychain.guid_signed_pubkey, vendor=False)
    protocol = OpenBazaarProtocol((ip_address, port), response[0], testnet=TESTNET)
    try:
        kserver = Server.loadState('cache.pickle', ip_address, port, protocol, db)
    except Exception:
        kserver = Server(this_node, db)
    kserver.protocol.connect_multiplexer(protocol)
    protocol.register_processor(kserver.protocol)
    kserver.saveStateRegularly('cache.pickle', 10)
    # start the market server on the same multiplexer
    mserver = network.Server(kserver, keychain.signing_key, db)
    mserver.protocol.connect_multiplexer(protocol)
    protocol.register_processor(mserver.protocol)
    reactor.listenUDP(port, protocol)

    class WebResource(resource.Resource):
        """HTTP resource serving a signed list of known peers.

        The peer cache is keyed by (ip, port) and refreshed every three
        minutes by pinging cached peers and spider-crawling the DHT.
        """
        def __init__(self, kserver_r):
            resource.Resource.__init__(self)
            self.kserver = kserver_r
            # Peer cache keyed by (ip, port); always includes this node.
            self.nodes = {}
            for bucket in self.kserver.protocol.router.buckets:
                for node in bucket.getNodes():
                    self.nodes[(node.ip, node.port)] = node
            self.nodes[(this_node.ip, this_node.port)] = this_node
            loopingCall = task.LoopingCall(self.crawl)
            loopingCall.start(180, True)
        def crawl(self):
            """Ping cached peers, drop dead ones, then crawl for new peers."""
            def gather_results(result):
                # Each result entry is a serialized objects.Node protobuf;
                # malformed entries are skipped.
                for proto in result:
                    n = objects.Node()
                    try:
                        n.ParseFromString(proto)
                        node = Node(n.guid, n.ip, n.port, n.signedPublicKey, n.vendor)
                        self.nodes[(node.ip, node.port)] = node
                    except Exception:
                        pass
            def start_crawl(results):
                # Drop peers that failed the ping, then spider the DHT
                # from a random id to discover new peers.
                for node, result in results.items():
                    if not result[0]:
                        del self.nodes[(node.ip, node.port)]
                node = Node(digest(random.getrandbits(255)))
                nearest = self.kserver.protocol.router.findNeighbors(node)
                spider = NodeSpiderCrawl(self.kserver.protocol, node, nearest, 100, 4)
                spider.find().addCallback(gather_results)
            ds = {}
            for bucket in self.kserver.protocol.router.buckets:
                for node in bucket.getNodes():
                    self.nodes[(node.ip, node.port)] = node
            for node in self.nodes.values():
                if node.id != this_node.id:
                    ds[node] = self.kserver.protocol.callPing(node)
            deferredDict(ds).addCallback(start_crawl)
        def getChild(self, child, request):
            # Every URL path maps to this resource.
            return self
        def render_GET(self, request):
            # Serve up to 50 peers (or all vendors) as signed JSON or
            # zlib-compressed protobuf; default is protobuf.
            nodes = self.nodes.values()
            shuffle(nodes)
            logger.info("Received a request for nodes, responding...")
            if "format" in request.args:
                if request.args["format"][0] == "json":
                    json_list = []
                    if "type" in request.args and request.args["type"][0] == "vendors":
                        for node in nodes:
                            if node.vendor is True:
                                node_dic = {}
                                node_dic["ip"] = node.ip
                                node_dic["port"] = node.port
                                node_dic["guid"] = node.id.encode("hex")
                                node_dic["signed_pubkey"] = node.signed_pubkey.encode("hex")
                                json_list.append(node_dic)
                        sig = signing_key.sign(str(json_list))
                        resp = {"peers": json_list, "signature": hexlify(sig[:64])}
                        request.write(json.dumps(resp, indent=4))
                    else:
                        for node in nodes[:50]:
                            node_dic = {}
                            node_dic["ip"] = node.ip
                            node_dic["port"] = node.port
                            json_list.append(node_dic)
                        sig = signing_key.sign(str(json_list))
                        resp = {"peers": json_list, "signature": hexlify(sig[:64])}
                        request.write(json.dumps(resp, indent=4))
                elif request.args["format"][0] == "protobuf":
                    proto = peers.PeerSeeds()
                    for node in nodes[:50]:
                        peer = peers.PeerData()
                        peer.ip_address = node.ip
                        peer.port = node.port
                        peer.vendor = node.vendor
                        proto.peer_data.append(peer.SerializeToString())
                    # sign() returns signature + message; keep only the
                    # 64-byte detached signature.
                    sig = signing_key.sign("".join(proto.peer_data))[:64]
                    proto.signature = sig
                    uncompressed_data = proto.SerializeToString()
                    request.write(uncompressed_data.encode("zlib"))
            else:
                proto = peers.PeerSeeds()
                if "type" in request.args and request.args["type"][0] == "vendors":
                    for node in nodes:
                        if node.vendor is True:
                            peer = peers.PeerData()
                            peer.ip_address = node.ip
                            peer.port = node.port
                            peer.vendor = node.vendor
                            peer.guid = node.id
                            peer.signedPubkey = node.signed_pubkey
                            proto.peer_data.append(peer.SerializeToString())
                    sig = signing_key.sign("".join(proto.peer_data))[:64]
                    proto.signature = sig
                    uncompressed_data = proto.SerializeToString()
                    request.write(uncompressed_data.encode("zlib"))
                else:
                    for node in nodes[:50]:
                        peer = peers.PeerData()
                        peer.ip_address = node.ip
                        peer.port = node.port
                        peer.vendor = node.vendor
                        proto.peer_data.append(peer.SerializeToString())
                    sig = signing_key.sign("".join(proto.peer_data))[:64]
                    proto.signature = sig
                    uncompressed_data = proto.SerializeToString()
                    request.write(uncompressed_data.encode("zlib"))
            request.finish()
            return server.NOT_DONE_YET

    server_protocol = server.Site(WebResource(kserver))
    reactor.listenTCP(8080, server_protocol)
    reactor.run()
if __name__ == "__main__":
    # pylint: disable=anomalous-backslash-in-string
    class OpenBazaard(Daemon):
        # Daemon wrapper that runs the seed server in the background.
        def run(self, *args):
            run(*args)
    class Parser(object):
        """Parse the command line and start/stop/restart the seed server."""
        def __init__(self, daemon):
            self.daemon = daemon
            parser = argparse.ArgumentParser(
                description='OpenBazaard Seed Server v0.1',
                usage='''
    python httpseed.py <command> [<args>]
    python httpseed.py <command> --help
commands:
    start            start the seed server
    stop             shutdown the server and disconnect
    restart          restart the server
''')
            parser.add_argument('command', help='Execute the given command')
            args = parser.parse_args(sys.argv[1:2])
            if not hasattr(self, args.command):
                parser.print_help()
                exit(1)
            getattr(self, args.command)()
        def start(self):
            # Daemonize only on unix platforms when -d is given;
            # otherwise run in the foreground.
            parser = argparse.ArgumentParser(
                description="Start the seed server",
                usage='''usage:
    python openbazaard.py start [-d DAEMON]''')
            parser.add_argument('-d', '--daemon', action='store_true', help="run the server in the background")
            parser.add_argument('-t', '--testnet', action='store_true', help="use the test network")
            args = parser.parse_args(sys.argv[2:])
            print "OpenBazaar Seed Server v0.1 starting..."
            unix = ("linux", "linux2", "darwin")
            if args.daemon and platform.system().lower() in unix:
                self.daemon.start(args.testnet)
            else:
                run(args.testnet)
        def stop(self):
            # Shut the daemonized server down via its pidfile.
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Shutdown the server and disconnect",
                usage='''usage:
    python openbazaard.py stop''')
            print "OpenBazaar Seed Server stopping..."
            self.daemon.stop()
        def restart(self):
            # Restart the daemonized server.
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Restart the server",
                usage='''usage:
    python openbazaard.py restart''')
            print "Restarting OpenBazaar server..."
            self.daemon.restart()
    Parser(OpenBazaard('/tmp/httpseed.pid'))
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "seed/httpseed.py",
"copies": "1",
"size": "11098",
"license": "mit",
"hash": -8422687940657860000,
"line_mean": 39.8014705882,
"line_max": 111,
"alpha_frac": 0.5486574158,
"autogenerated": false,
"ratio": 4.33515625,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008190914807150915,
"num_lines": 272
} |
__author__ = 'chris'
import sys, os, atexit
from signal import SIGTERM
class Daemon(object):
"""
A generic daemon class.
Usage: subclass the Daemon class and override the run() method
"""
# pylint: disable=file-builtin
def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.pidfile = pidfile
def daemonize(self):
"""
do the UNIX double-fork magic, see Stevens' "Advanced
Programming in the UNIX Environment" for details (ISBN 0201563177)
http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
"""
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# decouple from parent environment
os.chdir("/")
os.setsid()
os.umask(0)
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = file(self.stdin, 'r')
so = file(self.stdout, 'a+')
se = file(self.stderr, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# write pidfile
atexit.register(self.delpid)
pid = str(os.getpid())
file(self.pidfile, 'w+').write("%s\n" % pid)
def delpid(self):
os.remove(self.pidfile)
def start(self, *args):
"""
Start the daemon
"""
# Check for a pidfile to see if the daemon already runs
try:
pf = file(self.pidfile, 'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if pid:
message = "pidfile %s already exist. Daemon already running?\n"
sys.stderr.write(message % self.pidfile)
sys.exit(1)
# Start the daemon
self.daemonize()
self.run(*args)
def stop(self):
"""
Stop the daemon
"""
# Get the pid from the pidfile
try:
pf = file(self.pidfile, 'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if not pid:
message = "pidfile %s does not exist. Daemon not running?\n"
sys.stderr.write(message % self.pidfile)
return # not an error in a restart
# Try killing the daemon process
try:
os.kill(pid, SIGTERM)
except OSError, err:
err = str(err)
if err.find("No such process") > 0:
if os.path.exists(self.pidfile):
os.remove(self.pidfile)
else:
print str(err)
sys.exit(1)
def restart(self):
"""
Restart the daemon
"""
self.stop()
self.start()
def run(self, *args):
    """
    You should override this method when you subclass Daemon. It will be called after the process has been
    daemonized by start() or restart().
    """
| {
"repo_name": "OpenBazaar/Network",
"path": "daemon.py",
"copies": "6",
"size": "3630",
"license": "mit",
"hash": -6061110490768473000,
"line_mean": 27.359375,
"line_max": 110,
"alpha_frac": 0.5077134986,
"autogenerated": false,
"ratio": 3.9933993399339935,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7501112838533993,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys, os
import gzip
from cStringIO import StringIO
from OpenSSL import SSL
from twisted.application import service, internet
from twisted.python.log import ILogObserver
from twisted.internet import ssl, task, reactor
from twisted.web import resource, server
from subspace.network import Server
from subspace import log
from subspace.node import Node
from subspace.crawling import NodeSpiderCrawl
from servers.seedserver import peerseeds
from binascii import unhexlify
from random import shuffle
from bitcoin import *
sys.path.append(os.path.dirname(__file__))

# TLS material and the initial DHT contact point -- placeholders, replace
# before deployment.
ssl_key = "/path/to/ssl.key"
ssl_cert = "/path/to/ssl.cert"
bootstrap_list = [("1.2.4.5", 8335)]

# twistd application object that hosts the seed services below.
application = service.Application("subspace_seed_server")
application.setComponent(ILogObserver, log.FileLogObserver(sys.stdout, log.INFO).emit)

# This node's Kademlia identity (a fresh random key each boot).
node_id = unhexlify(random_key())
this_node = Node(node_id, "the_server_ip_address", 8335)

# Resume the routing table from disk when available; otherwise start fresh
# and bootstrap from the hard-coded contact list.
if os.path.isfile('cache.pickle'):
    kserver = Server.loadState('cache.pickle')
else:
    kserver = Server(id=node_id)
    kserver.bootstrap(bootstrap_list)
kserver.saveStateRegularly('cache.pickle', 10)

# DHT wire protocol served over UDP port 8335.
udpserver = internet.UDPServer(8335, kserver.protocol)
udpserver.setServiceParent(application)
class ChainedOpenSSLContextFactory(ssl.DefaultOpenSSLContextFactory):
    """
    OpenSSL context factory that loads a full certificate *chain* file
    (use_certificate_chain_file) instead of a single certificate.
    """

    def __init__(self, privateKeyFileName, certificateChainFileName,
                 sslmethod=SSL.SSLv23_METHOD):
        """
        @param privateKeyFileName: Name of a file containing a private key
        @param certificateChainFileName: Name of a file containing a certificate chain
        @param sslmethod: The SSL method to use
        """
        # NOTE(review): deliberately does not call the parent __init__ —
        # presumably because the parent expects a single certificate file;
        # we build and cache our own context instead. Confirm against the
        # twisted DefaultOpenSSLContextFactory API.
        self.privateKeyFileName = privateKeyFileName
        self.certificateChainFileName = certificateChainFileName
        self.sslmethod = sslmethod
        self.cacheContext()

    def cacheContext(self):
        # Build and memoize the context returned for every connection.
        ctx = SSL.Context(self.sslmethod)
        ctx.use_certificate_chain_file(self.certificateChainFileName)
        ctx.use_privatekey_file(self.privateKeyFileName)
        self._context = ctx
class WebResource(resource.Resource):
    """
    HTTP resource serving a snapshot of known DHT peers.

    A background crawl refreshes two cached serializations every 60 seconds:
    a gzipped PeerSeeds protobuf blob (the default response) and a JSON list,
    selected by the request's ?format= argument.
    """

    def __init__(self, kserver):
        resource.Resource.__init__(self)
        self.kserver = kserver
        # Cached responses; populated by the first crawl, which runs
        # immediately (LoopingCall started with now=True).
        self.protobuf = []
        self.json = []
        loopingCall = task.LoopingCall(self.crawl)
        loopingCall.start(60, True)

    def crawl(self):
        # Fix: `json` and `time` are used below but are not imported at
        # module level, which made this method raise NameError. Import them
        # locally so the fix is self-contained.
        import json
        import time

        def gather_results(result):
            nodes = []
            for bucket in self.kserver.protocol.router.buckets:
                nodes.extend(bucket.getNodes())
            nodes.append(this_node)
            shuffle(nodes)
            seeds = peerseeds.PeerSeeds()
            json_list = []
            # Publish at most 50 peers per snapshot.
            for node in nodes[:50]:
                json_list.append({"ip": node.ip, "port": node.port})
                data = seeds.seed.add()
                data.ip_address = node.ip
                data.port = node.port
                #TODO add in services after the wire protocol is updated
            seeds.timestamp = int(time.time())
            seeds.net = "main"
            # gzip the protobuf payload once so each response is cheap.
            uncompressed_data = seeds.SerializeToString()
            buf = StringIO()
            f = gzip.GzipFile(mode='wb', fileobj=buf)
            try:
                f.write(uncompressed_data)
            finally:
                f.close()
            self.protobuf = buf.getvalue()
            self.json = json.dumps(json_list, indent=4)

        # Ping every known contact so unresponsive nodes drop out of the
        # routing table before we publish it.
        for bucket in self.kserver.protocol.router.buckets:
            for node in bucket.getNodes():
                self.kserver.protocol.callPing(node)
        # Crawl toward a random ID to discover fresh peers, then rebuild
        # the cached responses from the resulting routing table.
        node = Node(unhexlify(random_key()))
        nearest = self.kserver.protocol.router.findNeighbors(node)
        spider = NodeSpiderCrawl(self.kserver.protocol, node, nearest, 100, 4)
        spider.find().addCallback(gather_results)

    def getChild(self, child, request):
        # Serve this same resource for every path segment.
        return self

    def render_GET(self, request):
        log.msg("Received a request for nodes, responding...")
        # NOTE(review): an unrecognized ?format= value writes nothing and
        # returns an empty body — preserved as-is.
        if "format" in request.args:
            if request.args["format"][0] == "json":
                request.write(self.json)
            elif request.args["format"][0] == "protobuf":
                request.write(self.protobuf)
        else:
            request.write(self.protobuf)
        request.finish()
        return server.NOT_DONE_YET
# Serve the peer-list resource over HTTPS on port 8080.
server_protocol = server.Site(WebResource(kserver))
seed_server = internet.SSLServer(8080, server_protocol, ChainedOpenSSLContextFactory(ssl_key, ssl_cert))
seed_server.setServiceParent(application)
| {
"repo_name": "cpacia/Subspace",
"path": "servers/seedserver/seedserver.py",
"copies": "1",
"size": "4594",
"license": "mit",
"hash": -633733203088176600,
"line_mean": 33.5413533835,
"line_max": 104,
"alpha_frac": 0.6460600784,
"autogenerated": false,
"ratio": 3.9671848013816926,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5113244879781692,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import sys, os, time, atexit
from signal import SIGTERM
class Daemon(object):
    """
    A generic daemon class.

    Usage: subclass the Daemon class and override the run() method
    """
    # pylint: disable=file-builtin
    def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
        # pidfile: where the daemonized child records its pid.
        # stdin/stdout/stderr: files the standard streams are redirected to.
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.pidfile = pidfile

    def daemonize(self):
        """
        do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # decouple from parent environment
        os.chdir("/")
        os.setsid()
        os.umask(0)
        # do second fork: the session leader exits so the daemon can never
        # reacquire a controlling terminal
        try:
            pid = os.fork()
            if pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # redirect standard file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        si = file(self.stdin, 'r')
        so = file(self.stdout, 'a+')
        se = file(self.stderr, 'a+', 0)  # unbuffered so messages appear immediately
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())
        # write pidfile (removed again at interpreter exit via delpid)
        atexit.register(self.delpid)
        pid = str(os.getpid())
        file(self.pidfile, 'w+').write("%s\n" % pid)

    def delpid(self):
        # atexit hook: remove the pidfile written by daemonize().
        os.remove(self.pidfile)

    def start(self, *args):
        """
        Start the daemon
        """
        # Check for a pidfile to see if the daemon already runs
        try:
            pf = file(self.pidfile, 'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if pid:
            message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)
        # Start the daemon; run() receives the args passed to start().
        self.daemonize()
        self.run(*args)

    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        try:
            pf = file(self.pidfile, 'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return  # not an error in a restart
        # Try killing the daemon process: keep signalling until it is gone,
        # then remove a stale pidfile if the daemon left one behind.
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError, err:
            err = str(err)
            if err.find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print str(err)
                sys.exit(1)

    def restart(self):
        """
        Restart the daemon
        """
        # NOTE(review): start() is called without arguments, so *args passed
        # to the original start() are not reapplied on restart.
        self.stop()
        self.start()

    def run(self, *args):
        """
        You should override this method when you subclass Daemon. It will be called after the process has been
        daemonized by start() or restart().
        """
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "daemon.py",
"copies": "4",
"size": "3692",
"license": "mit",
"hash": -2168159017127064800,
"line_mean": 27.6201550388,
"line_max": 110,
"alpha_frac": 0.5048754063,
"autogenerated": false,
"ratio": 3.9956709956709955,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6500546401970997,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import time
import pickle
import sys
import argparse
import platform
from twisted.internet import reactor, task
from twisted.python import log, logfile
from twisted.web.server import Site
from twisted.web.static import File
import stun
import requests
import threading
from autobahn.twisted.websocket import listenWS
from daemon import Daemon
from db.datastore import Database
from keyutils.keys import KeyChain
from dht.network import Server
from dht.node import Node
from net.wireprotocol import OpenBazaarProtocol
from constants import DATA_FOLDER, KSIZE, ALPHA, LIBBITCOIN_SERVER,\
LIBBITCOIN_SERVER_TESTNET, SSL_KEY, SSL_CERT, SEED, SEED_NODE, SEED_NODE_TESTNET
from market import network
from market.listeners import MessageListenerImpl, BroadcastListenerImpl, NotificationListenerImpl
from api.ws import WSFactory, WSProtocol
from api.restapi import OpenBazaarAPI
from dht.storage import PersistentStorage, ForgetfulStorage
from market.profile import Profile
from market.contracts import check_unfunded_for_payment
from log import Logger, FileLogObserver
from net.upnp import PortMapper
from net.sslcontext import ChainedOpenSSLContextFactory
from obelisk.client import LibbitcoinClient
def run(*args):
    """
    Boot the OpenBazaar server and start the Twisted reactor.

    args (packed by the CLI/daemon launcher):
        0 TESTNET, 1 LOGLEVEL, 2 PORT, 3 ALLOWIP, 4 SSL,
        5 RESTPORT, 6 WSPORT, 7 launch timestamp (for the startup log line).
    """
    TESTNET = args[0]
    LOGLEVEL = args[1]
    PORT = args[2]
    ALLOWIP = args[3]
    SSL = args[4]
    RESTPORT = args[5]
    WSPORT = args[6]

    # database
    db = Database(TESTNET)

    # key generation
    keys = KeyChain(db)

    # logging: rotate debug.log at ~15MB keeping one old file, and also
    # emit to stdout.
    logFile = logfile.LogFile.fromFullPath(DATA_FOLDER + "debug.log", rotateLength=15000000, maxRotatedFiles=1)
    log.addObserver(FileLogObserver(logFile, level=LOGLEVEL).emit)
    log.addObserver(FileLogObserver(level=LOGLEVEL).emit)
    logger = Logger(system="OpenBazaard")

    # NAT traversal: map the UDP port via UPnP in the background, then
    # discover our public address/NAT type with STUN.
    p = PortMapper()
    threading.Thread(target=p.add_port_mapping, args=(PORT, PORT, "UDP")).start()
    logger.info("Finding NAT Type...")
    while True:
        # sometimes the stun server returns a code the client
        # doesn't understand so we have to try again
        try:
            response = stun.get_ip_info(source_port=PORT)
            break
        except Exception:
            pass
    logger.info("%s on %s:%s" % (response[0], response[1], response[2]))
    nat_type = response[0]
    ip_address = response[1]
    port = response[2]
    # TODO: use TURN if symmetric NAT

    def on_bootstrap_complete(resp):
        # Closure over names bound later in run() (mserver, mlistener,
        # nlistener, libbitcoin_client); they exist by the time the DHT
        # bootstrap deferred fires.
        logger.info("bootstrap complete")
        mserver.get_messages(mlistener)
        task.LoopingCall(check_unfunded_for_payment, db, libbitcoin_client, nlistener, TESTNET).start(600)

    protocol = OpenBazaarProtocol((ip_address, port), response[0], testnet=TESTNET)

    # kademlia
    node = Node(keys.guid, ip_address, port, signed_pubkey=keys.guid_signed_pubkey, vendor=Profile(db).get().vendor)
    storage = ForgetfulStorage() if TESTNET else PersistentStorage(db.DATABASE)
    try:
        kserver = Server.loadState(DATA_FOLDER + 'cache.pickle', ip_address, port, protocol, db,
                                   on_bootstrap_complete, storage=storage)
    except Exception:
        # No (or unreadable) cached routing table: start fresh and bootstrap
        # from the seed.
        kserver = Server(node, db, KSIZE, ALPHA, storage=storage)
        kserver.protocol.connect_multiplexer(protocol)
        kserver.bootstrap(kserver.querySeed(SEED)).addCallback(on_bootstrap_complete)
    kserver.saveStateRegularly(DATA_FOLDER + 'cache.pickle', 10)
    protocol.register_processor(kserver.protocol)
    if nat_type != "Full Cone":
        # NOTE(review): presumably keeps the NAT mapping alive by contacting
        # the seed node — confirm against kserver.protocol.ping semantics.
        kserver.protocol.ping(SEED_NODE_TESTNET if TESTNET else SEED_NODE)

    # market
    mserver = network.Server(kserver, keys.signing_key, db)
    mserver.protocol.connect_multiplexer(protocol)
    protocol.register_processor(mserver.protocol)

    reactor.listenUDP(port, protocol)

    class OnlyIP(Site):
        """Site that only accepts TCP connections from a single source IP."""
        def __init__(self, resource, ip, timeout=60 * 60 * 1):
            self.ip = ip
            Site.__init__(self, resource, timeout=timeout)

        def buildProtocol(self, addr):
            if addr.host == self.ip:
                return Site.buildProtocol(self, addr)
            return None  # refuse the connection

    # websockets api
    if SSL:
        ws_factory = WSFactory("wss://127.0.0.1:" + str(WSPORT), mserver, kserver, only_ip=ALLOWIP)
        contextFactory = ChainedOpenSSLContextFactory(SSL_KEY, SSL_CERT)
        ws_factory.protocol = WSProtocol
        listenWS(ws_factory, contextFactory)
    else:
        ws_factory = WSFactory("ws://127.0.0.1:" + str(WSPORT), mserver, kserver, only_ip=ALLOWIP)
        ws_factory.protocol = WSProtocol
        listenWS(ws_factory)
    if ALLOWIP != "127.0.0.1" and ALLOWIP != "0.0.0.0":
        ws_interface = "0.0.0.0"
    else:
        ws_interface = ALLOWIP
    webdir = File(".")
    web = Site(webdir)
    reactor.listenTCP(WSPORT - 1, web, interface=ws_interface)

    # rest api
    api = OpenBazaarAPI(mserver, kserver, protocol)
    if ALLOWIP != "127.0.0.1" and ALLOWIP != "0.0.0.0":
        rest_interface = "0.0.0.0"
        site = OnlyIP(api, ALLOWIP, timeout=None)
    else:
        rest_interface = ALLOWIP
        site = Site(api, timeout=None)
    if SSL:
        reactor.listenSSL(RESTPORT, site, ChainedOpenSSLContextFactory(SSL_KEY, SSL_CERT), interface=rest_interface)
    else:
        reactor.listenTCP(RESTPORT, site, interface=rest_interface)

    # blockchain
    if TESTNET:
        libbitcoin_client = LibbitcoinClient(LIBBITCOIN_SERVER_TESTNET, log=Logger(service="LibbitcoinClient"))
    else:
        libbitcoin_client = LibbitcoinClient(LIBBITCOIN_SERVER, log=Logger(service="LibbitcoinClient"))

    # listeners
    nlistener = NotificationListenerImpl(ws_factory, db)
    mserver.protocol.add_listener(nlistener)
    mlistener = MessageListenerImpl(ws_factory, db)
    mserver.protocol.add_listener(mlistener)
    blistener = BroadcastListenerImpl(ws_factory, db)
    mserver.protocol.add_listener(blistener)

    protocol.set_servers(ws_factory, libbitcoin_client)

    logger.info("Startup took %s seconds" % str(round(time.time() - args[7], 2)))
    reactor.run()
if __name__ == "__main__":
    # pylint: disable=anomalous-backslash-in-string

    class OpenBazaard(Daemon):
        """Daemon wrapper: the daemonized child simply delegates to run()."""
        def run(self, *args):
            run(*args)

    class Parser(object):
        """git-style command dispatcher: the first CLI token selects a method."""
        def __init__(self, daemon):
            self.daemon = daemon
            parser = argparse.ArgumentParser(
                description='OpenBazaard v0.1',
                usage='''
python openbazaard.py <command> [<args>]
python openbazaard.py <command> --help
commands:
start start the OpenBazaar server
stop shutdown the server and disconnect
restart restart the server
''')
            parser.add_argument('command', help='Execute the given command')
            # Only parse the command word; each sub-command parses its own args.
            args = parser.parse_args(sys.argv[1:2])
            if not hasattr(self, args.command):
                parser.print_help()
                exit(1)
            getattr(self, args.command)()

        def start(self):
            """Parse start options, show the banner, then run in the
            foreground or hand off to the daemon."""
            parser = argparse.ArgumentParser(
                description="Start the OpenBazaar server",
                usage='''usage:
python openbazaard.py start [-d DAEMON]''')
            parser.add_argument('-d', '--daemon', action='store_true',
                                help="run the server in the background as a daemon")
            parser.add_argument('-t', '--testnet', action='store_true', help="use the test network")
            parser.add_argument('-s', '--ssl', action='store_true',
                                help="use ssl on api connections. you must set the path to your "
                                     "certificate and private key in the config file.")
            parser.add_argument('-l', '--loglevel', default="info",
                                help="set the logging level [debug, info, warning, error, critical]")
            parser.add_argument('-p', '--port', help="set the network port")
            parser.add_argument('-a', '--allowip', default="127.0.0.1",
                                help="only allow api connections from this ip")
            parser.add_argument('-r', '--restapiport', help="set the rest api port", default=18469)
            parser.add_argument('-w', '--websocketport', help="set the websocket api port", default=18466)
            parser.add_argument('--pidfile', help="name of the pid file", default="openbazaard.pid")
            args = parser.parse_args(sys.argv[2:])
            # ANSI escape codes for the startup banner
            OKBLUE = '\033[94m'
            ENDC = '\033[0m'
            print "________ " + OKBLUE + " __________" + ENDC
            print "\_____ \ ______ ____ ____" + OKBLUE + \
                "\______ \_____ _____________ _____ _______" + ENDC
            print " / | \\\____ \_/ __ \ / \\" + OKBLUE +\
                "| | _/\__ \ \___ /\__ \ \__ \\\_ __ \ " + ENDC
            print "/ | \ |_> > ___/| | \ " + OKBLUE \
                + "| \ / __ \_/ / / __ \_/ __ \| | \/" + ENDC
            print "\_______ / __/ \___ >___| /" + OKBLUE + "______ /(____ /_____ \(____ (____ /__|" + ENDC
            print " \/|__| \/ \/ " + OKBLUE + " \/ \/ \/ \/ \/" + ENDC
            print
            print "OpenBazaar Server v0.1 starting..."
            # If the user recently shut down we need to pause to make sure the socket is
            # fully closed before starting back up.
            try:
                with open(DATA_FOLDER + "cache.pickle", 'r') as f:
                    data = pickle.load(f)
                    if "shutdown_time" in data:
                        current_time = time.time()
                        if current_time - data["shutdown_time"] < 5:
                            time.sleep(5 - (current_time - data["shutdown_time"]))
            except IOError:
                pass
            unix = ("linux", "linux2", "darwin")
            if args.port:
                port = int(args.port)
            else:
                port = 18467 if not args.testnet else 28467
            # Daemonization is only supported on unix-like platforms.
            if args.daemon and platform.system().lower() in unix:
                self.daemon.pidfile = "/tmp/" + args.pidfile
                self.daemon.start(args.testnet, args.loglevel, port, args.allowip, args.ssl,
                                  int(args.restapiport), int(args.websocketport), time.time())
            else:
                run(args.testnet, args.loglevel, port, args.allowip, args.ssl,
                    int(args.restapiport), int(args.websocketport), time.time())

        def stop(self):
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Shutdown the server and disconnect",
                usage='''usage:
python openbazaard.py stop''')
            parser.parse_args(sys.argv[2:])
            print "OpenBazaar server stopping..."
            # Prefer a graceful shutdown through the REST api; fall back to
            # signalling the daemon if the api is unreachable.
            try:
                requests.get("http://localhost:18469/api/v1/shutdown")
            except Exception:
                self.daemon.stop()

        def restart(self):
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Restart the server",
                usage='''usage:
python openbazaard.py restart''')
            parser.parse_args(sys.argv[2:])
            print "Restarting OpenBazaar server..."
            self.daemon.restart()

    Parser(OpenBazaard('/tmp/openbazaard.pid'))
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "openbazaard.py",
"copies": "1",
"size": "11432",
"license": "mit",
"hash": 1310892587505464000,
"line_mean": 39.1122807018,
"line_max": 116,
"alpha_frac": 0.5900979706,
"autogenerated": false,
"ratio": 3.8093968677107632,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4899494838310763,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import time
import random
from binascii import unhexlify
import os
import mock
import nacl.signing
import nacl.encoding
import nacl.hash
from txrudp import connection, rudp, packet, constants
from twisted.trial import unittest
from twisted.internet import task, reactor, address, udp, defer
from dht.protocol import KademliaProtocol
from dht.utils import digest
from dht.storage import ForgetfulStorage
from dht.tests.utils import mknode
from dht.node import Node
from protos import message, objects
from wireprotocol import OpenBazaarProtocol
from db import datastore
class KademliaProtocolTest(unittest.TestCase):
def setUp(self):
    """Build a KademliaProtocol wired to a mocked txrudp multiplexer.

    The txrudp reactor clock is replaced with task.Clock so packet timeouts
    can be advanced deterministically, and every outgoing datagram is
    captured on self.proto_mock.send_datagram for inspection.
    """
    self.public_ip = '123.45.67.89'
    self.port = 12345
    self.own_addr = (self.public_ip, self.port)
    self.addr1 = ('132.54.76.98', 54321)
    self.addr2 = ('231.76.45.89', 15243)

    # Deterministic scheduling: route txrudp's callLater through our clock.
    self.clock = task.Clock()
    connection.REACTOR.callLater = self.clock.callLater

    self.proto_mock = mock.Mock(spec_set=rudp.ConnectionMultiplexer)
    self.handler_mock = mock.Mock(spec_set=connection.Handler)
    self.con = connection.Connection(
        self.proto_mock,
        self.handler_mock,
        self.own_addr,
        self.addr1
    )

    # Identity: signing key -> signed pubkey -> sha512; the guid is the
    # first 20 bytes of the hash (unhexlify of 40 hex chars).
    valid_key = "1a5c8e67edb8d279d1ae32fa2da97e236b95e95c837dc8c3c7c2ff7a7cc29855"
    self.signing_key = nacl.signing.SigningKey(valid_key, encoder=nacl.encoding.HexEncoder)
    verify_key = self.signing_key.verify_key
    signed_pubkey = self.signing_key.sign(str(verify_key))
    h = nacl.hash.sha512(signed_pubkey)
    self.storage = ForgetfulStorage()
    self.node = Node(unhexlify(h[:40]), self.public_ip, self.port, signed_pubkey, True)
    self.db = datastore.Database(filepath="test.db")
    self.protocol = KademliaProtocol(self.node, self.storage, 20, self.db)

    self.wire_protocol = OpenBazaarProtocol(self.own_addr)
    self.wire_protocol.register_processor(self.protocol)
    self.protocol.connect_multiplexer(self.wire_protocol)
    self.handler = self.wire_protocol.ConnHandler([self.protocol], self.wire_protocol)
    self.handler.connection = self.con

    # Fake UDP transport so makeConnection() can report a host address.
    transport = mock.Mock(spec_set=udp.Port)
    ret_val = address.IPv4Address('UDP', self.public_ip, self.port)
    transport.attach_mock(mock.Mock(return_value=ret_val), 'getHost')
    self.wire_protocol.makeConnection(transport)
def tearDown(self):
    """Close the connections and delete the on-disk test database."""
    self.con.shutdown()
    self.wire_protocol.shutdown()
    os.remove("test.db")
def test_invalid_datagram(self):
    # Garbage payloads (too short / unparseable) must be rejected, not raise.
    self.assertFalse(self.handler.receive_message("hi"))
    self.assertFalse(self.handler.receive_message("hihihihihihihihihihihihihihihihihihihihih"))
def test_rpc_ping(self):
    """A PING request is answered with our own serialized node as the reply argument."""
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("PING")
    m.testnet = False
    data = m.SerializeToString()
    # Mutate the request into the expected response by appending our node.
    m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    # Flush all pending retransmission/timeout work on the fake clock.
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
def test_rpc_store(self):
    """A valid STORE is acked with 'True' and the value lands in storage."""
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", self.protocol.sourceNode.getProto().SerializeToString()])
    data = m.SerializeToString()
    # Mutate the request into the expected response: args replaced by "True".
    del m.arguments[-3:]
    m.arguments.append("True")
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
    # assertEqual (not assertTrue on ==) so a mismatch reports both values.
    self.assertEqual(
        self.storage.getSpecific(digest("Keyword"), "Key"),
        self.protocol.sourceNode.getProto().SerializeToString())
def test_bad_rpc_store(self):
    # NOTE(review): presumably rejected because 'testkeyword' is not a valid
    # digest-sized keyword — confirm against rpc_store's validation.
    r = self.protocol.rpc_store(self.node, 'testkeyword', 'kw', 'val')
    self.assertEqual(r, ['False'])
def test_rpc_delete(self):
    """DELETE removes a stored value only when the signature verifies."""
    self._connecting_to_connected()
    # Set a keyword to store
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", self.protocol.sourceNode.getProto().SerializeToString()])
    data = m.SerializeToString()
    del m.arguments[-3:]
    m.arguments.append("True")
    expected_message1 = m.SerializeToString()
    self.handler.receive_message(data)
    # assertEqual gives informative failure output, unlike assertTrue(a == b).
    self.assertEqual(
        self.storage.getSpecific(digest("Keyword"), "Key"),
        self.protocol.sourceNode.getProto().SerializeToString())
    # Test bad signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", "Bad Signature"])
    data = m.SerializeToString()
    del m.arguments[-3:]
    m.arguments.append("False")
    expected_message2 = m.SerializeToString()
    self.handler.receive_message(data)
    # Value must still be present after the rejected delete.
    self.assertEqual(
        self.storage.getSpecific(digest("Keyword"), "Key"),
        self.protocol.sourceNode.getProto().SerializeToString())
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packets = tuple(
        packet.Packet.from_bytes(call[0][0])
        for call in self.proto_mock.send_datagram.call_args_list
    )
    self.assertEqual(sent_packets[0].payload, expected_message1)
    self.assertEqual(sent_packets[1].payload, expected_message2)
    self.proto_mock.send_datagram.call_args_list = []
    # Test good signature
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("DELETE")
    m.testnet = False
    m.arguments.extend([digest("Keyword"), "Key", self.signing_key.sign("Key")[:64]])
    data = m.SerializeToString()
    del m.arguments[-3:]
    m.arguments.append("True")
    expected_message3 = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    self.assertEqual(sent_packet.payload, expected_message3)
    self.assertIsNone(self.storage.getSpecific(digest("Keyword"), "Key"))
def test_rpc_stun(self):
    """STUN replies with the observed (ip, port) of the requester."""
    self._connecting_to_connected()
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STUN")
    m.testnet = False
    data = m.SerializeToString()
    m.arguments.extend([self.public_ip, str(self.port)])
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    # Parsing also verifies the payload is a well-formed Message.
    a = message.Message()
    a.ParseFromString(received_message)
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
def test_rpc_find_node(self):
    """FIND_NODE returns the known contacts (distance-ordered) as reply args."""
    self._connecting_to_connected()
    node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
    node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
    node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
    self.protocol.router.addContact(node1)
    self.protocol.router.addContact(node2)
    self.protocol.router.addContact(node3)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_NODE")
    m.testnet = False
    m.arguments.append(digest("nodetofind"))
    data = m.SerializeToString()
    # Expected reply: the three contacts, serialized, in this exact order.
    del m.arguments[-1]
    m.arguments.extend([node2.getProto().SerializeToString(), node1.getProto().SerializeToString(),
                        node3.getProto().SerializeToString()])
    expected_message = m.SerializeToString()
    self.handler.receive_message(data)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    a = message.Message()
    a.ParseFromString(received_message)
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
def test_rpc_find_value(self):
    """FIND_VALUE returns the stored value when the keyword is present."""
    self._connecting_to_connected()
    # Set a value to find
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.arguments.extend([digest("Keyword"), "Key", self.protocol.sourceNode.getProto().SerializeToString()])
    data = m.SerializeToString()
    self.handler.receive_message(data)
    # assertEqual (not assertTrue on ==) so a mismatch reports both values.
    self.assertEqual(
        self.storage.getSpecific(digest("Keyword"), "Key"),
        self.protocol.sourceNode.getProto().SerializeToString())
    # Send the find_value rpc
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_VALUE")
    m.testnet = False
    m.arguments.append(digest("Keyword"))
    data = m.SerializeToString()
    self.handler.receive_message(data)
    # Expected reply: literal "value" marker followed by the serialized Value.
    del m.arguments[-1]
    value = objects.Value()
    value.valueKey = "Key"
    value.serializedData = self.protocol.sourceNode.getProto().SerializeToString()
    m.arguments.append("value")
    m.arguments.append(value.SerializeToString())
    expected_message = m.SerializeToString()
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packets = tuple(
        packet.Packet.from_bytes(call[0][0])
        for call in self.proto_mock.send_datagram.call_args_list
    )
    received_message = sent_packets[1].payload
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 3)
def test_rpc_find_without_value(self):
    """FIND_VALUE falls back to returning contacts when nothing is stored."""
    self._connecting_to_connected()
    node1 = Node(digest("id1"), "127.0.0.1", 12345, digest("key1"))
    node2 = Node(digest("id2"), "127.0.0.1", 22222, digest("key2"))
    node3 = Node(digest("id3"), "127.0.0.1", 77777, digest("key3"))
    self.protocol.router.addContact(node1)
    self.protocol.router.addContact(node2)
    self.protocol.router.addContact(node3)
    m = message.Message()
    m.messageID = digest("msgid")
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("FIND_VALUE")
    m.testnet = False
    m.arguments.append(digest("Keyword"))
    data = m.SerializeToString()
    self.handler.receive_message(data)
    # Expected reply: the three contacts, serialized, in this exact order.
    del m.arguments[-1]
    m.arguments.extend([node3.getProto().SerializeToString(), node1.getProto().SerializeToString(),
                        node2.getProto().SerializeToString()])
    expected_message = m.SerializeToString()
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    received_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(received_message)
    self.assertEqual(received_message, expected_message)
    self.assertEqual(len(m_calls), 2)
def test_callPing(self):
    """callPing emits a well-formed PING datagram to the target address."""
    self._connecting_to_connected()
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    self.protocol.callPing(n)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    # assertEqual gives informative failure output, unlike assertTrue(a == b).
    self.assertEqual(len(m.messageID), 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
    self.assertEqual(m.command, message.PING)
    self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
def test_callStore(self):
    """callStore sends STORE with keyword, key and value as arguments."""
    self._connecting_to_connected()
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    self.protocol.callStore(n, digest("Keyword"), digest("Key"),
                            self.protocol.sourceNode.getProto().SerializeToString())
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    # assertEqual gives informative failure output, unlike assertTrue(a == b).
    self.assertEqual(len(m.messageID), 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
    self.assertEqual(m.command, message.STORE)
    self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
    self.assertEqual(m.arguments[0], digest("Keyword"))
    self.assertEqual(m.arguments[1], digest("Key"))
    self.assertEqual(m.arguments[2], self.protocol.sourceNode.getProto().SerializeToString())
def test_callFindValue(self):
    """callFindValue sends FIND_VALUE carrying the keyword id."""
    self._connecting_to_connected()
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    keyword = Node(digest("Keyword"))
    self.protocol.callFindValue(n, keyword)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    # assertEqual gives informative failure output, unlike assertTrue(a == b).
    self.assertEqual(len(m.messageID), 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
    self.assertEqual(m.command, message.FIND_VALUE)
    self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
    self.assertEqual(m.arguments[0], keyword.id)
def test_callFindNode(self):
    """callFindNode must emit a FIND_NODE RPC carrying the target node id."""
    self._connecting_to_connected()
    recipient = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    target = Node(digest("nodetofind"))
    self.protocol.callFindNode(recipient, target)
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    # Inspect the first datagram handed to the mocked transport.
    first_call = self.proto_mock.send_datagram.call_args_list[0][0]
    outgoing = packet.Packet.from_bytes(first_call[0])
    parsed = message.Message()
    parsed.ParseFromString(outgoing.payload)
    self.assertTrue(len(parsed.messageID) == 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, parsed.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, parsed.sender.signedPublicKey)
    self.assertTrue(parsed.command == message.FIND_NODE)
    self.assertEqual(first_call[1], self.addr1)
    self.assertEqual(parsed.arguments[0], target.id)
def test_callDelete(self):
    """callDelete must send a DELETE RPC with keyword, key and signature."""
    self._connecting_to_connected()
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    self.wire_protocol[self.addr1] = self.con
    self.protocol.callDelete(n, digest("Keyword"), digest("Key"), digest("Signature"))
    self.clock.advance(100 * constants.PACKET_TIMEOUT)
    connection.REACTOR.runUntilCurrent()
    # The first datagram written to the mocked transport is our packet.
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    m = message.Message()
    m.ParseFromString(sent_message)
    self.assertEqual(self.proto_mock.send_datagram.call_args_list[0][0][1], self.addr1)
    # assertEqual reports both values on failure, unlike assertTrue(a == b).
    self.assertEqual(len(m.messageID), 20)
    self.assertEqual(self.protocol.sourceNode.getProto().guid, m.sender.guid)
    self.assertEqual(self.protocol.sourceNode.getProto().signedPublicKey, m.sender.signedPublicKey)
    self.assertEqual(m.command, message.DELETE)
    self.assertEqual(m.arguments[0], digest("Keyword"))
    self.assertEqual(m.arguments[1], digest("Key"))
    self.assertEqual(m.arguments[2], digest("Signature"))
def test_acceptResponse(self):
    """A response matching an outstanding message ID fires the deferred,
    removes the entry from _outstanding and cancels its timeout."""
    self._connecting_to_connected()

    def handle_response(resp):
        # resp is (success_flag, response_arguments).
        self.assertTrue(resp[0])
        self.assertEqual(resp[1][0], "test")
        # Bookkeeping must be cleaned up once the response is accepted.
        self.assertTrue(message_id not in self.protocol._outstanding)
        self.assertFalse(timeout.active())

    message_id = digest("msgid")
    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    d = defer.Deferred()
    # Register a pending RPC the same way the protocol's send path would.
    timeout = reactor.callLater(5, self.protocol._timeout, message_id)
    self.protocol._outstanding[message_id] = (d, timeout)
    self.protocol._acceptResponse(message_id, ["test"], n)
    return d.addCallback(handle_response)
def test_unknownRPC(self):
    """Random garbage that is not a valid RPC message must be rejected."""
    garbage = str(random.getrandbits(1400))
    self.assertFalse(self.handler.receive_message(garbage))
def test_timeout(self):
    """If the peer never responds, the RPC deferred fires with a False
    success flag and the outstanding-message table is emptied."""
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con

    def test_remove_outstanding():
        # All pending RPC bookkeeping should be gone after the timeout.
        self.assertTrue(len(self.protocol._outstanding) == 0)

    def test_deffered(d):
        # First element of the result tuple is the success flag.
        self.assertFalse(d[0])
        test_remove_outstanding()

    n = Node(digest("S"), self.addr1[0], self.addr1[1])
    d = self.protocol.callPing(n)
    # Advance past the RPC timeout so the protocol's _timeout fires.
    self.clock.advance(6)
    connection.REACTOR.runUntilCurrent()
    self.clock.advance(6)
    return d.addCallback(test_deffered)
def test_transferKeyValues(self):
    """transferKeyValues should replay locally stored (keyword, key, value)
    entries to the target node as STORE messages."""
    self._connecting_to_connected()
    self.wire_protocol[self.addr1] = self.con
    self.protocol.addToRouter(mknode())
    # Seed local storage with a single value under "keyword".
    self.protocol.storage[digest("keyword")] = (
        digest("key"), self.protocol.sourceNode.getProto().SerializeToString())
    self.protocol.transferKeyValues(Node(digest("id"), self.addr1[0], self.addr1[1]))
    self.clock.advance(1)
    connection.REACTOR.runUntilCurrent()
    # Parse the message that actually went over the (mocked) wire.
    sent_packet = packet.Packet.from_bytes(self.proto_mock.send_datagram.call_args_list[0][0][0])
    sent_message = sent_packet.payload
    x = message.Message()
    x.ParseFromString(sent_message)
    # Build the STORE message we expect to have been sent.
    m = message.Message()
    m.sender.MergeFrom(self.protocol.sourceNode.getProto())
    m.command = message.Command.Value("STORE")
    m.arguments.append(digest("keyword"))
    m.arguments.append(digest("key"))
    m.arguments.append(self.protocol.sourceNode.getProto().SerializeToString())
    self.assertEqual(x.sender, m.sender)
    self.assertEqual(x.command, m.command)
    self.assertEqual(x.arguments[0], m.arguments[0])
    self.assertEqual(x.arguments[1], m.arguments[1])
    self.assertEqual(x.arguments[2], m.arguments[2])
def test_refreshIDs(self):
    """Buckets that have not been touched recently should yield refresh IDs."""
    contacts = [
        Node(digest("id1"), "127.0.0.1", 12345, signed_pubkey=digest("key1")),
        Node(digest("id2"), "127.0.0.1", 22222, signed_pubkey=digest("key2")),
        Node(digest("id3"), "127.0.0.1", 77777, signed_pubkey=digest("key3")),
    ]
    for contact in contacts:
        self.protocol.router.addContact(contact)
    # Age every bucket so the refresh logic considers them stale.
    for bucket in self.protocol.router.buckets:
        bucket.lastUpdated = (time.time() - 5000)
    ids = self.protocol.getRefreshIDs()
    self.assertTrue(len(ids) == 1)
def _connecting_to_connected(self):
    """Drive self.con from CONNECTING to CONNECTED by injecting the remote
    side's SYN packet, then record the sequence numbers both ends will use."""
    remote_synack_packet = packet.Packet.from_data(
        42,
        self.con.own_addr,
        self.con.dest_addr,
        ack=0,
        syn=True
    )
    self.con.receive_packet(remote_synack_packet)
    # Let the deterministic (mock) reactor process the handshake.
    self.clock.advance(0)
    connection.REACTOR.runUntilCurrent()
    self.next_remote_seqnum = 43
    m_calls = self.proto_mock.send_datagram.call_args_list
    sent_syn_packet = packet.Packet.from_bytes(m_calls[0][0][0])
    seqnum = sent_syn_packet.sequence_number
    # Forget the handshake traffic so tests only see their own datagrams.
    self.handler_mock.reset_mock()
    self.proto_mock.reset_mock()
    self.next_seqnum = seqnum + 1
def test_badRPCDelete(self):
n = mknode()
val = self.protocol.rpc_delete(n, 'testkeyword', 'key', 'testsig')
self.assertEqual(val, ["False"])
val = self.protocol.rpc_delete(n, '', '', '')
| {
"repo_name": "melpomene/OpenBazaar-Server",
"path": "dht/tests/test_protocol.py",
"copies": "3",
"size": "23132",
"license": "mit",
"hash": -1043259563948300300,
"line_mean": 41.6789667897,
"line_max": 111,
"alpha_frac": 0.6487117413,
"autogenerated": false,
"ratio": 3.6764144945963126,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0009983571869119482,
"num_lines": 542
} |
__author__ = 'chris'
import time
import sys
from daemon import Daemon
from twisted.internet import reactor
from txjsonrpc.netstring.jsonrpc import Proxy
def run_echoserver():
    """Poll the local daemon over netstring JSON-RPC for new messages and
    echo each one back to its sender, prefixed with "Echo: "."""
    def getNew():
        # NOTE(review): time.sleep blocks the reactor thread for a second
        # between polls; a twisted LoopingCall would poll without blocking.
        time.sleep(1)
        d = proxy.callRemote('getnew')
        d.addCallbacks(echo, printError)

    def echo(messages):
        # Success path: send an echo reply for every received message,
        # then immediately schedule the next poll.
        def printResp(resp):
            print resp
        for message in messages:
            value = message["plaintext"]
            key = message["sender"]
            print "Received message, echoing..."
            d = proxy.callRemote('send', key, "Echo: " + str(value))
            d.addCallback(printResp)
        getNew()

    def printError(error):
        # Error path: report and keep polling.
        print 'error', error
        getNew()

    proxy = Proxy('127.0.0.1', 8334)
    getNew()
    reactor.run()
class EchoDaemon(Daemon):
    """Daemonized wrapper that runs the echo server in the background."""
    def run(self):
        # Entry point invoked by the Daemon base class after forking.
        run_echoserver()
if __name__ == "__main__":
    # Command-line control of the echo daemon: start | stop | restart.
    daemon = EchoDaemon('/tmp/echodaemon.pid')
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart" % sys.argv[0]
        sys.exit(2)
| {
"repo_name": "cpacia/Subspace",
"path": "servers/echoserver/echoserver.py",
"copies": "1",
"size": "1483",
"license": "mit",
"hash": -4493611053167628300,
"line_mean": 26.9811320755,
"line_max": 68,
"alpha_frac": 0.4976399191,
"autogenerated": false,
"ratio": 4.1194444444444445,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006026430271664905,
"num_lines": 53
} |
__author__ = 'chris'
import time
from twisted.trial import unittest
from dht.utils import digest
from dht.storage import ForgetfulStorage, PersistentStorage, TTLDict
from protos.objects import Value
class ForgetFulStorageTest(unittest.TestCase):
    """Unit tests for the in-memory, TTL-expiring ForgetfulStorage."""

    def setUp(self):
        # 20-byte digests stand in for keywords, keys and a value blob.
        self.keyword1 = digest("shoes")
        self.keyword2 = digest("socks")
        self.key1 = digest("contract1")
        self.key2 = digest("contract2")
        self.value = digest("node")

    def test_setitem(self):
        """Storing (key, value, ttl) tuples groups values per keyword in TTLDicts."""
        f = ForgetfulStorage()
        tdict1 = TTLDict(3)
        tdict1[self.key1] = self.value
        f[self.keyword1] = (self.key1, self.value, 10)
        tdict2 = TTLDict(3)
        tdict2[self.key1] = self.value
        tdict2[self.key2] = self.value
        f[self.keyword2] = (self.key1, self.value, 10)
        f[self.keyword2] = (self.key2, self.value, 10)
        self.assertEqual(f.data[self.keyword1], tdict1)
        self.assertEqual(f.data[self.keyword2], tdict2)

    def test_getitem(self):
        """__getitem__ returns the TTLDict stored under a keyword."""
        f = ForgetfulStorage()
        tdict = TTLDict(3)
        tdict[self.key1] = self.value
        f[self.keyword1] = (self.key1, self.value, 10)
        self.assertEqual(tdict, f[self.keyword1])

    def test_get(self):
        """get() returns the values as serialized Value protobufs."""
        v = Value()
        v.valueKey = self.key1
        v.serializedData = self.value
        v.ttl = 10
        testv = [v.SerializeToString()]
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, 10)
        self.assertEqual(testv, f.get(self.keyword1))

    def test_getSpecific(self):
        """getSpecific() fetches a single value by keyword and key."""
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, 10)
        self.assertEqual(self.value, f.getSpecific(self.keyword1, self.key1))

    def test_delete(self):
        """After delete(), get() for that keyword returns None."""
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, 10)
        f.delete(self.keyword1, self.key1)
        self.assertEqual(f.get(self.keyword1), None)

    def test_iterkeys(self):
        """iterkeys() yields the stored keywords."""
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, 10)
        for k in f.iterkeys():
            self.assertEqual(k, self.keyword1)

    def test_iteritems(self):
        """iteritems(keyword) yields (key, value) pairs for that keyword."""
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, 10)
        for k, v in f.iteritems(self.keyword1):
            self.assertEqual((self.key1, self.value), (k, v))

    def test_ttl(self):
        """A value stored with a tiny ttl expires (effectively) immediately."""
        f = ForgetfulStorage()
        f[self.keyword1] = (self.key1, self.value, .00000000000001)
        self.assertTrue(self.keyword1 not in f)
class PersistentStorageTest(unittest.TestCase):
    """Unit tests for PersistentStorage, exercised against an in-memory DB
    (":memory:") so no files are created."""

    def setUp(self):
        # 20-byte digests stand in for keywords, keys and a value blob.
        self.keyword1 = digest("shoes")
        self.keyword2 = digest("socks")
        self.key1 = digest("contract1")
        self.key2 = digest("contract2")
        self.value = digest("node")

    def test_setitem(self):
        """Multiple (key, value, ttl) tuples accumulate under one keyword."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        p[self.keyword2] = (self.key1, self.value, 10)
        p[self.keyword2] = (self.key2, self.value, 10)
        # Each stored row is (key, value, ...); compare only the first two fields.
        self.assertEqual(p[self.keyword1][0][:2], (self.key1, self.value))
        ret = []
        for val in p[self.keyword2]:
            ret.append(val[:2])
        self.assertEqual(ret, [(self.key1, self.value), (self.key2, self.value)])

    def test_get(self):
        """get() returns the values as serialized Value protobufs."""
        v = Value()
        v.valueKey = self.key1
        v.serializedData = self.value
        v.ttl = 10
        testv = [v.SerializeToString()]
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        self.assertEqual(testv, p.get(self.keyword1))

    def test_getSpecific(self):
        """getSpecific() fetches a single value by keyword and key."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        self.assertEqual(self.value, p.getSpecific(self.keyword1, self.key1))

    def test_delete(self):
        """After delete(), get() for that keyword returns None."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        p.delete(self.keyword1, self.key1)
        self.assertEqual(p.get(self.keyword1), None)

    def test_iterkeys(self):
        """iterkeys() yields the stored keywords."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        for k in p.iterkeys():
            self.assertEqual(k, self.keyword1)

    def test_iteritems(self):
        """iteritems(keyword) yields (key, value) pairs for that keyword."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, 10)
        for k, v in p.iteritems(self.keyword1):
            self.assertEqual((self.key1, self.value), (k, v))

    def test_ttl(self):
        """A value stored with a tiny ttl expires (effectively) immediately."""
        p = PersistentStorage(":memory:")
        p[self.keyword1] = (self.key1, self.value, .000000000001)
        self.assertTrue(p.get(self.keyword1) is None)
class TTLDictTest(unittest.TestCase):
    """ TTLDict tests """
    def test_update_no_ttl(self):
        """ Test update() call """
        # TTL of None means entries never expire.
        ttl_dict = TTLDict(None)
        orig_dict = {'hello': 'world', 'intval': 3}
        ttl_dict.update(orig_dict)
        self.assertEqual(sorted(orig_dict.items()), sorted(ttl_dict.items()))

    def test_len_clears_expired_items(self):
        """ Test that calling len() removes expired items """
        # Negative TTL: everything is already expired at insertion time.
        ttl_dict = TTLDict(-1, a=1, b=2)
        self.assertEqual(ttl_dict._values.keys(), sorted(['a', 'b']))
        self.assertEqual(len(ttl_dict), 0)
        self.assertEqual(ttl_dict._values.keys(), [])

    def test_expire_at(self):
        """ Test expire_at """
        ttl_dict = TTLDict(60)
        ttl_dict['a'] = 100
        ttl_dict['b'] = 123
        self.assertEqual(ttl_dict['a'], 100)
        self.assertEqual(ttl_dict['b'], 123)
        self.assertEqual(len(ttl_dict), 2)
        # Expiring 'a' right now must make it unreachable while 'b' survives.
        ttl_dict.expire_at('a', time.time())
        self.assertRaises(KeyError, lambda: ttl_dict['a'])
        self.assertEqual(len(ttl_dict), 1)
        self.assertEqual(ttl_dict['b'], 123)

    def test_set_ttl_get_ttl(self):
        """ Test set_ttl() and get_ttl() """
        ttl_dict = TTLDict(120, foo=3, bar=None)
        self.assertEqual(sorted(ttl_dict), ['bar', 'foo'])
        self.assertEqual(ttl_dict['foo'], 3)
        self.assertEqual(ttl_dict['bar'], None)
        self.assertEqual(len(ttl_dict), 2)
        # Shorten foo's TTL to 3s; bar keeps the default 120s.
        ttl_dict.set_ttl('foo', 3)
        ttl_foo = ttl_dict.get_ttl('foo')
        self.assertTrue(ttl_foo <= 3.0)
        ttl_bar = ttl_dict.get_ttl('bar')
        self.assertTrue(ttl_bar - ttl_foo > 100)

    def test_set_ttl_key_error(self):
        """ Test that set_ttl() raises KeyError """
        ttl_dict = TTLDict(60)
        self.assertRaises(KeyError, ttl_dict.set_ttl, 'missing', 10)

    def test_get_ttl_key_error(self):
        """ Test that get_ttl() raises KeyError """
        ttl_dict = TTLDict(60)
        self.assertRaises(KeyError, ttl_dict.get_ttl, 'missing')

    def test_iter_empty(self):
        """ Test that empty TTLDict can be iterated """
        ttl_dict = TTLDict(60)
        for key in ttl_dict:
            self.fail("Iterating empty dictionary gave a key %r" % (key,))

    def test_iter(self):
        """ Test that TTLDict can be iterated """
        ttl_dict = TTLDict(60)
        ttl_dict.update(zip(range(10), range(10)))
        self.assertEqual(len(ttl_dict), 10)
        for key in ttl_dict:
            self.assertEqual(key, ttl_dict[key])

    def test_is_expired(self):
        """ Test is_expired() call """
        now = time.time()
        ttl_dict = TTLDict(60, a=1, b=2)
        self.assertFalse(ttl_dict.is_expired('a'))
        self.assertFalse(ttl_dict.is_expired('a', now=now))
        self.assertTrue(ttl_dict.is_expired('a', now=now + 61))
        # remove=False, so nothing should be gone
        self.assertEqual(len(ttl_dict), 2)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "dht/tests/test_storage.py",
"copies": "1",
"size": "7596",
"license": "mit",
"hash": -8088301562851039000,
"line_mean": 34.661971831,
"line_max": 81,
"alpha_frac": 0.5922854134,
"autogenerated": false,
"ratio": 3.3184796854521625,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9410478828383824,
"avg_score": 0.00005725409366769724,
"num_lines": 213
} |
__author__ = 'chris'
import unittest
from random import randint, random
import numpy as np
from six.moves import xrange
from sympy import symbols, diff, exp, Piecewise
from pyquant.tests.mixins import GaussianMixin
from pyquant import peaks
from pyquant import cpeaks
def get_gauss_value(x, amp, mu, std):
    """Evaluate the (unnormalized) Gaussian amp * exp(-(x - mu)^2 / (2 * std^2))."""
    deviation = x - mu
    return amp * np.exp(-deviation ** 2 / (2 * std ** 2))
class MathTests(GaussianMixin, unittest.TestCase):
    """Checks pyquant's analytic gaussian jacobian/hessian implementations
    against symbolic derivatives computed with sympy."""

    def setUp(self):
        super(MathTests, self).setUp()
        self.std_2, self.std2_2 = 0.5, 0.75
        # dtype=float: the np.float alias was deprecated in NumPy 1.20 and
        # removed in 1.24; the builtin float is the documented replacement.
        self.one_bigauss_params = np.array([self.amp, self.mu, self.std, self.std_2], dtype=float)
        self.two_bigauss_params = np.array([self.amp, self.mu, self.std, self.std_2, self.amp, self.mu2, self.std, self.std2_2], dtype=float)
        self.one_bigauss = peaks.bigauss_ndim(self.x, self.one_bigauss_params)
        self.two_bigauss = peaks.bigauss_ndim(self.x, self.two_bigauss_params)

    def test_jacobians(self):
        """The jacobian of the squared residual must vanish at a perfect fit,
        and must match sympy's symbolic derivative away from it."""
        one_gauss_jac = peaks.gauss_jac(self.one_gauss_params, self.x, self.one_gauss, False)
        self.assertEqual(one_gauss_jac.tolist(), np.zeros_like(self.one_gauss_params).tolist())
        two_gauss_jac = peaks.gauss_jac(self.two_gauss_params, self.x, self.two_gauss, False)
        self.assertEqual(two_gauss_jac.tolist(), np.zeros_like(self.two_gauss_params).tolist())
        one_bigauss_jac = peaks.bigauss_jac(self.one_bigauss_params, self.x, self.one_bigauss, False)
        self.assertEqual(one_bigauss_jac.tolist(), np.zeros_like(self.one_bigauss_params).tolist())
        two_bigauss_jac = peaks.bigauss_jac(self.two_bigauss_params, self.x, self.two_bigauss, False)
        self.assertEqual(two_bigauss_jac.tolist(), np.zeros_like(self.two_bigauss_params).tolist())
        # Symbolic model: sum of three gaussians, squared residual against y.
        y, x, a, u, s1, s1_2, a2, u2, s2, s2_2, a3, u3, s3, s3_2 = symbols('y x a u s1 s1_2 a2 u2 s2 s2_2 a3 u3 s3 s3_2')
        three_gauss = (y - (a * exp(-(u - x) ** 2 / (2 * s1 ** 2)) + a2 * exp(-(u2 - x) ** 2 / (2 * s2 ** 2)) + a3 * exp(-(u3 - x) ** 2 / (2 * s3 ** 2)))) ** 2
        deriv_store = {}
        for i in xrange(2):
            subs = [
                ('a', random()),
                ('u', randint(1, 10)),
                ('s1', random()),
                ('a2', random()),
                ('u2', randint(12, 20)),
                ('s2', random()),
                ('a3', random()),
                ('u3', randint(22, 30)),
                ('s3', random()),
            ]
            # Use a distinct comprehension variable: in Python 2 a list
            # comprehension leaks its loop variable and would clobber i.
            params = np.array([pair[1] for pair in subs], dtype=float)
            noisy_params = params + 2*np.random.rand(params.shape[0])
            gauss_x = np.linspace(-10, 40, 100)
            gauss_y = peaks.gauss_ndim(gauss_x, noisy_params)
            jacobian = peaks.gauss_jac(params, gauss_x, gauss_y, False)
            for var_index, var in enumerate([a, u, s1, a2, u2, s2, a3, u3, s3]):
                # Cache the symbolic derivative; diff() is expensive.
                deriv = deriv_store.setdefault(var, diff(three_gauss, var))
                pq_jac = jacobian[var_index]
                sympy_jacobian = sum([deriv.subs(dict(subs, **{'x': xi, 'y': yi})) for xi, yi in zip(gauss_x, gauss_y)])
                np.testing.assert_allclose(pq_jac, np.array(sympy_jacobian, dtype=float),
                                           err_msg='d{} - pq: {}, sympy: {}'.format(var, pq_jac,
                                                                                    sympy_jacobian), atol=1e-4)

    def test_hessians(self):
        """The analytic hessian must match sympy's second derivatives."""
        y, x, a, u, s1, a2, u2, s2, a3, u3, s3 = symbols('y x a u s1 a2 u2 s2 a3 u3 s3')
        three_gauss = (y - (
            a * exp(-(u - x) ** 2 / (2 * s1 ** 2)) + a2 * exp(-(u2 - x) ** 2 / (2 * s2 ** 2)) + a3 * exp(
                -(u3 - x) ** 2 / (2 * s3 ** 2)))) ** 2
        hess_store = {}
        for _ in xrange(2):
            subs = [
                ('a', random()+0.5),
                ('u', random()*10),
                ('s1', random()*3+2),
                ('a2', random()+0.5),
                ('u2', random()*10+10),
                ('s2', random()*3+2),
                ('a3', random()+0.5),
                ('u3', random()*10+20),
                ('s3', random()*3+2),
            ]
            params = np.array([pair[1] for pair in subs], dtype=float)
            noisy_params = params + 2 * np.random.rand(params.shape[0])
            gauss_x = np.linspace(-10, 40, 100)
            gauss_y = cpeaks.gauss_ndim(gauss_x, noisy_params)
            hessian = cpeaks.gauss_hess(params, gauss_x, gauss_y)
            for var_index, var in enumerate([a, u, s1, a2, u2, s2, a3, u3, s3]):
                for var_index2, var2 in enumerate([a, u, s1, a2, u2, s2, a3, u3, s3]):
                    # Cache each mixed second derivative the first time it's needed.
                    deriv = hess_store.setdefault((var, var2), diff(three_gauss, var, var2))
                    sympy_hessian = sum([deriv.subs(dict(subs, **{'x': xi, 'y': yi})) for xi, yi in zip(gauss_x, gauss_y)])
                    pq_hess = hessian[var_index, var_index2]
                    np.testing.assert_allclose(pq_hess, np.array(sympy_hessian, dtype=float), err_msg='d{}d{} - pq: {}, sympy: {}'.format(var, var2, pq_hess, sympy_hessian), atol=1e-4)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {
"repo_name": "pandeylab/pyquant",
"path": "pyquant/tests/test_math.py",
"copies": "1",
"size": "5301",
"license": "mit",
"hash": -7542228579142530000,
"line_mean": 53.0918367347,
"line_max": 184,
"alpha_frac": 0.5155631013,
"autogenerated": false,
"ratio": 2.9631078814980434,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8955839488732193,
"avg_score": 0.004566298813169867,
"num_lines": 98
} |
__author__ = 'chris'
class HD44780:
    """Minimal driver for an HD44780 character LCD wired in 4-bit mode to
    Raspberry Pi GPIO pins (BCM numbering).

    NOTE(review): this module references ``GPIO`` and ``sleep`` but never
    imports them (``RPi.GPIO`` / ``time.sleep``) — confirm they are provided
    elsewhere, otherwise construction fails with NameError.
    """
    def __init__(self, pin_rs=4, pin_e=17, pins_db=[18, 22, 23, 24]):
        # Initialise GPIO
        # NOTE(review): pins_db uses a mutable default list; harmless here
        # since it is only read, but a shared default is a known pitfall.
        self.pin_rs=pin_rs      # register-select pin (command vs character)
        self.pin_e=pin_e        # enable/strobe pin
        self.pins_db=pins_db    # data pins, ordered D4..D7 (see cmd())
        GPIO.setmode(GPIO.BCM)
        GPIO.setup(self.pin_e, GPIO.OUT)
        GPIO.setup(self.pin_rs, GPIO.OUT)
        for pin in self.pins_db:
            GPIO.setup(pin, GPIO.OUT)
        self.clear()

    def clear(self):
        # Blank / Reset LCD
        self.cmd(0x33)  # init part 1 of the standard 4-bit start-up sequence
        self.cmd(0x32)  # init part 2: switch the controller into 4-bit mode
        self.cmd(0x28)  # function set: 4-bit bus, 2 display lines, 5x8 font
        self.cmd(0x0C)  # display control: display on, cursor off, blink off
        self.cmd(0x06)  # entry mode: increment cursor, no display shift
        self.cmd(0x01)  # clear display

    def cmd(self, bits, char_mode=False):
        # Send command to LCD (or character data when char_mode is True)
        sleep(0.001)
        bits=bin(bits)[2:].zfill(8)  # 8-char MSB-first bit string
        GPIO.output(self.pin_rs, char_mode)  # RS low = command, high = data
        for pin in self.pins_db:
            GPIO.output(pin, False)
        # High nibble first: bits[0..3] map onto D7..D4, then strobe E.
        for i in range(4):
            if bits[i] == "1":
                GPIO.output(self.pins_db[::-1][i], True)
        GPIO.output(self.pin_e, True)
        GPIO.output(self.pin_e, False)
        for pin in self.pins_db:
            GPIO.output(pin, False)
        # Low nibble: bits[4..7], strobed the same way.
        for i in range(4,8):
            if bits[i] == "1":
                GPIO.output(self.pins_db[::-1][i-4], True)
        GPIO.output(self.pin_e, True)
        GPIO.output(self.pin_e, False)

    def message(self, text):
        # Send string to LCD. Newline wraps to second line
        for char in text:
            if char == '\n':
                self.cmd(0xC0) # next line
            else:
                self.cmd(ord(char),True)
| {
"repo_name": "siliconchris1973/PyLCDWriter",
"path": "hd44780_class.py",
"copies": "1",
"size": "1666",
"license": "apache-2.0",
"hash": -4638867005212340000,
"line_mean": 26.3114754098,
"line_max": 69,
"alpha_frac": 0.5108043217,
"autogenerated": false,
"ratio": 3.0681399631675874,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.90430350179349,
"avg_score": 0.007181853386537511,
"num_lines": 61
} |
__author__ = 'chris'
"""
Copyright (c) 2015 Chris Pacia
"""
import enum
import bitcoin
import traceback
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import reactor, task
from bitcoin.messages import *
from bitcoin.core import b2lx
from bitcoin.net import CInv
from bitcoin.wallet import CBitcoinAddress
from extensions import msg_version2, msg_filterload, msg_merkleblock, MsgHeader
from io import BytesIO
from log import Logger
# Connection life-cycle states for BitcoinProtocol.
State = enum.Enum('State', ('CONNECTING', 'DOWNLOADING', 'CONNECTED', 'SHUTDOWN'))
# P2P protocol version advertised in our version handshake message.
PROTOCOL_VERSION = 70002
# Register the merkleblock parser so MsgSerializable can deserialize it.
messagemap["merkleblock"] = msg_merkleblock
class BitcoinProtocol(Protocol):
    """
    Twisted protocol speaking the bitcoin p2p wire protocol for a bloom-filter
    (SPV-style) client: performs the version handshake, loads a bloom filter,
    downloads merkle blocks/headers, and fires callbacks for transactions that
    match the registered subscriptions.
    """

    def __init__(self, user_agent, inventory, subscriptions, bloom_filter, blockchain, download_listener):
        self.user_agent = user_agent
        # txid -> tx we can serve via getdata, or txid -> [block hashes]
        # when a block mentions a tx before the tx itself arrives.
        self.inventory = inventory
        # address/txid -> callback registrations shared with the client.
        self.subscriptions = subscriptions
        self.bloom_filter = bloom_filter
        self.blockchain = blockchain
        self.download_count = 0
        # [blocks announced, blocks processed] for the current inv batch.
        self.download_tracker = [0, 0]
        self.download_listener = download_listener
        self.timeouts = {}   # name or tx hash -> IDelayedCall watchdog
        self.callbacks = {}
        self.state = State.CONNECTING
        self.version = None  # peer's version message once received
        self.buffer = ""     # unparsed wire bytes received so far
        self.log = Logger(system=self)

    def connectionMade(self):
        """
        Send the version message and start the handshake
        """
        # Watchdogs: drop the peer if version/verack don't arrive within 5s.
        self.timeouts["verack"] = reactor.callLater(5, self.response_timeout, "verack")
        self.timeouts["version"] = reactor.callLater(5, self.response_timeout, "version")
        msg_version2(PROTOCOL_VERSION, self.user_agent, nStartingHeight=self.blockchain.get_height() if self.blockchain else -1).stream_serialize(self.transport)

    def dataReceived(self, data):
        """Accumulate bytes and dispatch each complete p2p message by command."""
        self.buffer += data
        header = MsgHeader.from_bytes(self.buffer)
        # Wait until the full payload (24-byte header + msglen) has arrived.
        if len(self.buffer) < header.msglen + 24:
            return
        try:
            stream = BytesIO(self.buffer)
            m = MsgSerializable.stream_deserialize(stream)
            self.buffer = stream.read()  # keep any trailing, unparsed bytes
            if m.command == "verack":
                self.timeouts["verack"].cancel()
                del self.timeouts["verack"]
                # Handshake is complete only once both version and verack arrived.
                if "version" not in self.timeouts:
                    self.on_handshake_complete()
            elif m.command == "version":
                self.version = m
                # Require protocol >= 70001 and the NODE_NETWORK service bit.
                if m.nVersion < 70001 or m.nServices != 1:
                    self.transport.loseConnection()
                self.timeouts["version"].cancel()
                del self.timeouts["version"]
                msg_verack().stream_serialize(self.transport)
                if self.blockchain is not None:
                    # Number of blocks behind the peer's reported chain tip.
                    self.to_download = self.version.nStartingHeight - self.blockchain.get_height()
                if "verack" not in self.timeouts:
                    self.on_handshake_complete()
            elif m.command == "getdata":
                # Serve transactions we previously broadcast from our inventory.
                for item in m.inv:
                    if item.hash in self.inventory and item.type == 1:
                        transaction = msg_tx()
                        transaction.tx = self.inventory[item.hash]
                        transaction.stream_serialize(self.transport)
            elif m.command == "inv":
                for item in m.inv:
                    # This is either an announcement of tx we broadcast ourselves or a tx we have already downloaded.
                    # In either case we only need to callback here.
                    if item.type == 1 and item.hash in self.subscriptions:
                        self.subscriptions[item.hash]["callback"](item.hash)
                    # This is the first time we are seeing this txid. Let's download it and check to see if it sends
                    # coins to any addresses in our subscriptions.
                    elif item.type == 1 and item.hash not in self.inventory:
                        self.timeouts[item.hash] = reactor.callLater(5, self.response_timeout, item.hash)
                        cinv = CInv()
                        cinv.type = 1
                        cinv.hash = item.hash
                        getdata_packet = msg_getdata()
                        getdata_packet.inv.append(cinv)
                        getdata_packet.stream_serialize(self.transport)
                    # The peer announced a new block. Unlike txs, we should download it, even if we've previously
                    # downloaded it from another peer, to make sure it doesn't contain any txs we didn't know about.
                    elif item.type == 2 or item.type == 3:
                        if self.state == State.DOWNLOADING:
                            self.download_tracker[0] += 1
                        cinv = CInv()
                        cinv.type = 3  # request a merkleblock, not a full block
                        cinv.hash = item.hash
                        getdata_packet = msg_getdata()
                        getdata_packet.inv.append(cinv)
                        getdata_packet.stream_serialize(self.transport)
                        if self.state != State.DOWNLOADING:
                            self.log.debug("Peer %s:%s announced new %s %s" % (self.transport.getPeer().host, self.transport.getPeer().port, CInv.typemap[item.type], b2lx(item.hash)))
            elif m.command == "tx":
                # A tx we requested arrived; cancel its download watchdog.
                if m.tx.GetHash() in self.timeouts:
                    self.timeouts[m.tx.GetHash()].cancel()
                for out in m.tx.vout:
                    try:
                        addr = str(CBitcoinAddress.from_scriptPubKey(out.scriptPubKey))
                    except Exception:
                        addr = None  # non-standard script; nothing to match
                    if addr in self.subscriptions:
                        if m.tx.GetHash() not in self.subscriptions:
                            # It's possible the first time we are hearing about this tx is following block
                            # inclusion. If this is the case, let's make sure we include the correct number
                            # of confirmations.
                            in_blocks = self.inventory[m.tx.GetHash()] if m.tx.GetHash() in self.inventory else []
                            confirms = []
                            if len(in_blocks) > 0:
                                for block in in_blocks:
                                    confirms.append(self.blockchain.get_confirmations(block))
                            self.subscriptions[m.tx.GetHash()] = {
                                "announced": 0,
                                "ann_threshold": self.subscriptions[addr][0],
                                "confirmations": max(confirms) if len(confirms) > 0 else 0,
                                "last_confirmation": 0,
                                "callback": self.subscriptions[addr][1],
                                "in_blocks": in_blocks,
                                "tx": m.tx
                            }
                        self.subscriptions[addr][1](m.tx.GetHash())
                        if m.tx.GetHash() in self.inventory:
                            del self.inventory[m.tx.GetHash()]
            elif m.command == "merkleblock":
                if self.blockchain is not None:
                    self.blockchain.process_block(m.block)
                    if self.state != State.DOWNLOADING:
                        self.blockchain.save()
                    # check for block inclusion of subscribed txs
                    for match in m.block.get_matched_txs():
                        if match in self.subscriptions:
                            self.subscriptions[match]["in_blocks"].append(m.block.GetHash())
                        else:
                            # stick the hash here in case this is the first we are hearing about this tx.
                            # when the tx comes over the wire after this block, we will append this hash.
                            self.inventory[match] = [m.block.GetHash()]
                    # run through subscriptions and callback with updated confirmations
                    for txid in self.subscriptions:
                        try:
                            confirms = []
                            for block in self.subscriptions[txid]["in_blocks"]:
                                confirms.append(self.blockchain.get_confirmations(block))
                            self.subscriptions[txid]["confirmations"] = max(confirms)
                            self.subscriptions[txid]["callback"](txid)
                        except Exception:
                            # NOTE(review): address-keyed entries lack these
                            # fields, so failures here are silently skipped.
                            pass
                    # If we are in the middle of an initial chain download, let's check to see if we have
                    # either reached the end of the download or if we need to loop back around and make
                    # another get_blocks call.
                    if self.state == State.DOWNLOADING:
                        self.download_count += 1
                        percent = int((self.download_count / float(self.to_download))*100)
                        if self.download_listener is not None:
                            self.download_listener.progress(percent, self.download_count)
                            self.download_listener.on_block_downloaded((self.transport.getPeer().host, self.transport.getPeer().port), header, self.to_download - self.download_count + 1)
                        if percent == 100:
                            if self.download_listener is not None:
                                self.download_listener.download_complete()
                            self.log.info("Chain download 100% complete")
                        self.download_tracker[1] += 1
                        # We've downloaded every block in the inv packet and still have more to go.
                        if (self.download_tracker[0] == self.download_tracker[1] and
                                self.blockchain.get_height() < self.version.nStartingHeight):
                            if self.timeouts["download"].active():
                                self.timeouts["download"].cancel()
                            self.download_blocks(self.callbacks["download"])
                        # We've downloaded everything so let's callback to the client.
                        elif self.blockchain.get_height() >= self.version.nStartingHeight:
                            self.blockchain.save()
                            self.state = State.CONNECTED
                            self.callbacks["download"]()
                            if self.timeouts["download"].active():
                                self.timeouts["download"].cancel()
            elif m.command == "headers":
                if self.timeouts["download"].active():
                    self.timeouts["download"].cancel()
                for header in m.headers:
                    # If this node sent a block with no parent then disconnect from it and callback
                    # on client.check_for_more_blocks.
                    if self.blockchain.process_block(header) is None:
                        self.blockchain.save()
                        self.callbacks["download"]()
                        self.transport.loseConnection()
                        return
                    self.download_count += 1
                    percent = int((self.download_count / float(self.to_download))*100)
                    if self.download_listener is not None:
                        self.download_listener.progress(percent, self.download_count)
                        self.download_listener.on_block_downloaded((self.transport.getPeer().host, self.transport.getPeer().port), header, self.to_download - self.download_count + 1)
                    if percent == 100:
                        if self.download_listener is not None:
                            self.download_listener.download_complete()
                        self.log.info("Chain download 100% complete")
                # The headers message only comes in batches of 500 blocks. If we still have more blocks to download
                # loop back around and call get_headers again.
                if self.blockchain.get_height() < self.version.nStartingHeight:
                    self.download_blocks(self.callbacks["download"])
                else:
                    self.blockchain.save()
                    self.callbacks["download"]()
                    self.state = State.CONNECTED
            elif m.command == "ping":
                msg_pong(nonce=m.nonce).stream_serialize(self.transport)
            else:
                self.log.debug("Received message %s from %s:%s" % (m.command, self.transport.getPeer().host, self.transport.getPeer().port))
            # Recurse to process any further complete message left in the buffer.
            if len(self.buffer) >= 24: self.dataReceived("")
        except Exception:
            # NOTE(review): broad catch keeps one malformed message from
            # killing the connection, but it also hides programming errors.
            traceback.print_exc()

    def on_handshake_complete(self):
        """Both version and verack seen: load the bloom filter and go live."""
        self.log.info("Connected to peer %s:%s" % (self.transport.getPeer().host, self.transport.getPeer().port))
        self.load_filter()
        self.state = State.CONNECTED

    def response_timeout(self, id):
        """Watchdog fired: the peer failed to answer `id` in time; tear down."""
        if id == "download":
            self.callbacks["download"]()
        del self.timeouts[id]
        # Cancel every other pending watchdog before disconnecting.
        for t in self.timeouts.values():
            if t.active():
                t.cancel()
        if self.state != State.SHUTDOWN:
            self.log.warning("Peer %s:%s unresponsive, disconnecting..." % (self.transport.getPeer().host, self.transport.getPeer().port))
        self.transport.loseConnection()
        self.state = State.SHUTDOWN

    def download_blocks(self, callback):
        """Kick off (or continue) the initial chain download from this peer."""
        if self.state == State.CONNECTING:
            # Handshake not finished yet; retry in one second.
            return task.deferLater(reactor, 1, self.download_blocks, callback)
        if self.blockchain is not None:
            if self.download_listener is not None and self.download_count == 0:
                self.download_listener.download_started((self.transport.getPeer().host, self.transport.getPeer().port), self.to_download)
            self.log.info("Downloading blocks from %s:%s" % (self.transport.getPeer().host, self.transport.getPeer().port))
            self.state = State.DOWNLOADING
            self.callbacks["download"] = callback
            self.timeouts["download"] = reactor.callLater(30, self.response_timeout, "download")
            # With active subscriptions we need (merkle)blocks; otherwise
            # headers alone are enough to track the chain.
            if len(self.subscriptions) > 0:
                get = msg_getblocks()
                self.download_tracker = [0, 0]
            else:
                get = msg_getheaders()
            get.locator = self.blockchain.get_locator()
            get.stream_serialize(self.transport)

    def send_message(self, message_obj):
        """Serialize a message to the peer, deferring until the handshake is done."""
        if self.state == State.CONNECTING:
            return task.deferLater(reactor, 1, self.send_message, message_obj)
        message_obj.stream_serialize(self.transport)

    def load_filter(self):
        """Install our bloom filter so the peer only relays matching txs."""
        msg_filterload(filter=self.bloom_filter).stream_serialize(self.transport)

    def connectionLost(self, reason):
        self.state = State.SHUTDOWN
        self.log.info("Connection to %s:%s closed" % (self.transport.getPeer().host, self.transport.getPeer().port))
class PeerFactory(ClientFactory):
    """Builds BitcoinProtocol instances for one peer and reports failed or
    dropped connections back to the client via the disconnect callback."""

    def __init__(self, params, user_agent, inventory, subscriptions, bloom_filter, disconnect_cb, blockchain, download_listener):
        self.params = params  # chain selection string, e.g. "mainnet"/"testnet"
        self.user_agent = user_agent
        self.inventory = inventory
        self.subscriptions = subscriptions
        self.bloom_filter = bloom_filter
        self.cb = disconnect_cb  # invoked so the client can pick a new peer
        self.protocol = None
        self.blockchain = blockchain
        self.download_listener = download_listener
        # NOTE(review): SelectParams is a process-wide setting in
        # python-bitcoinlib, not per-factory.
        bitcoin.SelectParams(params)
        self.log = Logger(system=self)

    def buildProtocol(self, addr):
        self.protocol = BitcoinProtocol(self.user_agent, self.inventory, self.subscriptions, self.bloom_filter, self.blockchain, self.download_listener)
        return self.protocol

    def clientConnectionFailed(self, connector, reason):
        self.log.warning("Connection failed, will try a different node")
        self.cb(self)

    def clientConnectionLost(self, connector, reason):
        # Let the client replace this peer.
        self.cb(self)
| {
"repo_name": "cpacia/pybitcoin",
"path": "pybitcoin/protocol.py",
"copies": "1",
"size": "16097",
"license": "mit",
"hash": 4268630953773050400,
"line_mean": 49.460815047,
"line_max": 186,
"alpha_frac": 0.5471827049,
"autogenerated": false,
"ratio": 4.676641487507263,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5723824192407263,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
"""
Copyright (c) 2015 Chris Pacia
"""
import random
import struct
import bitcoin
import math
from bitcoin.core import CBlockHeader, b2x
from bitcoin.messages import msg_version, MsgSerializable
from bitcoin.core.serialize import VarStringSerializer, VarIntSerializer, ser_read
from bitcoin.bloom import CBloomFilter
from hashlib import sha256
from io import BytesIO
PROTO_VERSION = 70002
class MsgHeader(MsgSerializable):
    """
    A class for just the message header.

    Mirrors the 24-byte bitcoin p2p header: network magic, NUL-padded command
    name, payload length and payload checksum.
    """

    def __init__(self, command, msglen, checksum):
        self.params = bitcoin.params.MESSAGE_START  # network magic bytes
        self.command = command
        self.msglen = msglen
        self.checksum = checksum

    @classmethod
    def from_bytes(cls, b, protover=PROTO_VERSION):
        """Deserialize a header from a byte string."""
        f = BytesIO(b)
        # Use cls rather than a hard-coded class name so subclasses
        # deserialize to their own type.
        return cls.stream_deserialize(f, protover=protover)

    @classmethod
    def stream_deserialize(cls, f, protover=PROTO_VERSION):
        """Read and validate a 24-byte header from stream `f`.

        Raises:
            ValueError: if the magic bytes don't match the selected network.
        """
        recvbuf = ser_read(f, 4 + 12 + 4 + 4)
        # check magic
        if recvbuf[:4] != bitcoin.params.MESSAGE_START:
            raise ValueError("Invalid message start '%s', expected '%s'" %
                             (b2x(recvbuf[:4]), b2x(bitcoin.params.MESSAGE_START)))
        # remaining header fields: command, msg length, checksum
        command = recvbuf[4:4+12].split(b"\x00", 1)[0]  # strip NUL padding
        msglen = struct.unpack(b"<i", recvbuf[4+12:4+12+4])[0]
        checksum = recvbuf[4+12+4:4+12+4+4]
        return cls(command, msglen, checksum)
class msg_version2(msg_version):
    """
    An extension of the python-bitcoinlib message class which bumps
    the version number and adds the relay boolean. Also changes the
    default services to zero.
    """
    def __init__(self, protover=PROTO_VERSION, user_agent="/pyBitcoin0.1/", nStartingHeight=-1):
        super(msg_version2, self).__init__(protover)
        self.nStartingHeight = nStartingHeight
        # relay=False asks the peer to withhold tx inv until a filter is loaded (BIP 37).
        self.relay = False
        # Advertise no services: we are an SPV client, not a full node.
        self.nServices = 0
        self.strSubVer = user_agent
        self.addrFrom.nServices = 0
        # Placeholder addresses; the remote side does not rely on them.
        self.addrFrom.ip = "127.0.0.1"
        self.addrTo.ip = "127.0.0.1"

    def msg_ser(self, f):
        # Field order must match the p2p "version" wire format exactly.
        f.write(struct.pack(b"<i", self.nVersion))
        f.write(struct.pack(b"<Q", self.nServices))
        f.write(struct.pack(b"<q", self.nTime))
        self.addrTo.stream_serialize(f, True)
        self.addrFrom.stream_serialize(f, True)
        f.write(struct.pack(b"<Q", self.nNonce))
        VarStringSerializer.stream_serialize(self.strSubVer, f)
        f.write(struct.pack(b"<i", self.nStartingHeight))
        # Trailing relay flag is the addition this subclass exists for.
        f.write(struct.pack('?', self.relay))
class msg_filterload(MsgSerializable):
    """
    A filter load message that is missing from python-bitcoinlib
    """
    command = b"filterload"

    def __init__(self, protover=PROTO_VERSION, filter=None):
        """Wrap `filter` (a CBloomFilter); build a small random one if omitted."""
        super(msg_filterload, self).__init__(protover)
        self.protover = protover
        if not filter:
            # Default: room for 3 elements at a 1% false-positive rate.
            self.filter = BloomFilter(3, 0.01, random.getrandbits(32), CBloomFilter.UPDATE_NONE)
        else:
            self.filter = filter

    @classmethod
    def msg_deser(cls, f, protover=PROTO_VERSION):
        c = cls()
        c.filter = CBloomFilter.stream_deserialize(f)
        return c

    def msg_ser(self, f):
        self.filter.stream_serialize(f)

    def __repr__(self):
        # Fixed: the old repr formatted the raw vData bytes with %i (TypeError),
        # referenced a nonexistent `nHashFunctions` attribute (the CBloomFilter
        # attribute is `nHashFuncs`), and was missing the closing parenthesis.
        return "msg_filterload(vData=%s nHashFuncs=%i nTweak=%i nFlags=%i)" % (
            b2x(self.filter.vData), self.filter.nHashFuncs,
            self.filter.nTweak, self.filter.nFlags)
class BloomFilter(CBloomFilter):
    """
    An extension of the python-bitcoinlib CBloomFilter class to allow for
    removal of inserted objects.
    """
    def __init__(self, nElements, nFPRate, nTweak, nFlags):
        super(BloomFilter, self).__init__(nElements, nFPRate, nTweak, nFlags)
        self._elements = []  # history of inserted items, needed to rebuild on remove()
        self.nFPRate = nFPRate
        self.nElements = nElements

    # Masks for setting bit (nIndex % 8) within a byte of vData.
    __bit_mask = bytearray([0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80])

    def insert(self, elem):
        """Insert an element in the filter.
        elem may be a COutPoint or bytes
        """
        if isinstance(elem, bitcoin.core.COutPoint):
            elem = elem.serialize()
        # A single 0xff byte means "match everything"; no bits left to set.
        # NOTE(review): in this case the element is NOT appended to _elements,
        # so a later remove() won't account for it -- confirm intended.
        if len(self.vData) == 1 and self.vData[0] == 0xff:
            return
        for i in range(0, self.nHashFuncs):
            nIndex = self.bloom_hash(i, elem)
            # Sets bit nIndex of vData
            self.vData[nIndex >> 3] |= self.__bit_mask[7 & nIndex]
        self._elements.append(elem)

    def remove(self, elem):
        """
        Remove an element from the bloom filter. Works by clearing the filter and re-inserting
        the elements that weren't removed.
        """
        # Sizing constants from the reference bloom filter formulas.
        LN2SQUARED = 0.4804530139182014246671025263266649717305529515945455
        LN2 = 0.6931471805599453094172321214581765680755001343602552
        if elem in self._elements:
            self._elements.remove(elem)
            # Re-derive the optimal vData size and hash count, then rebuild
            # the filter from the remaining elements.
            self.vData = bytearray(int(min(-1 / LN2SQUARED * self.nElements * math.log(self.nFPRate), self.MAX_BLOOM_FILTER_SIZE * 8) / 8))
            self.nHashFuncs = int(min(len(self.vData) * 8 / self.nElements * LN2, self.MAX_HASH_FUNCS))
            for element in self._elements:
                self.insert(element)
class CMerkleBlock(CBlockHeader):
    """
    The merkle block returned to spv clients when a filter is set on the remote peer.

    Carries the block header plus the partial merkle tree (hash list and flag
    bits) proving which transactions matched the bloom filter.
    """
    __slots__ = ['nTX', 'vHashes', 'vFlags']

    def __init__(self, nVersion=3, hashPrevBlock=b'\x00'*32, hashMerkleRoot=b'\x00'*32, nTime=0, nBits=0, nNonce=0, nTX=0, vHashes=(), vFlags=()):
        """Create a new block"""
        super(CMerkleBlock, self).__init__(nVersion, hashPrevBlock, hashMerkleRoot, nTime, nBits, nNonce)
        # CBlockHeader instances are immutable; bypass __setattr__ for our slots.
        object.__setattr__(self, 'nTX', nTX)
        object.__setattr__(self, 'vHashes', vHashes)
        object.__setattr__(self, 'vFlags', vFlags)

    @classmethod
    def stream_deserialize(cls, f):
        def bits(f, n):
            # Expand n bytes into a flat list of 0/1 flags, LSB of each byte first.
            ret = []
            bytes = (ord(b) for b in f.read(n))
            for b in bytes:
                for i in xrange(8):
                    ret.append((b >> i) & 1)
            return ret
        self = super(CMerkleBlock, cls).stream_deserialize(f)
        nTX = struct.unpack('<L', ser_read(f, 4))[0]
        nHashes = VarIntSerializer.stream_deserialize(f)
        vHashes = []
        for i in range(nHashes):
            vHashes.append(ser_read(f, 32))  # each tree hash is 32 bytes
        nFlags = VarIntSerializer.stream_deserialize(f)
        vFlags = bits(f, nFlags)
        object.__setattr__(self, 'nTX', nTX)
        object.__setattr__(self, 'vHashes', vHashes)
        object.__setattr__(self, 'vFlags', vFlags)
        return self

    def stream_serialize(self, f):
        super(CMerkleBlock, self).stream_serialize(f)
        f.write(struct.pack('<L', self.nTX))
        VarIntSerializer.stream_serialize(len(self.vHashes), f)
        for hash in self.vHashes:
            f.write(hash)
        # NOTE(review): len(vFlags)/8 truncates under Python 2 int division, so
        # any trailing partial byte of flags is dropped -- confirm intended.
        VarIntSerializer.stream_serialize(len(self.vFlags)/8, f)
        bin_string = ""
        for bit in self.vFlags:
            bin_string += str(bit)
            if len(bin_string) == 8:
                # Reverse so flag 0 becomes the least significant bit of the byte.
                f.write(struct.pack('B', int(bin_string[::-1], 2)))
                bin_string = ""

    def get_matched_txs(self):
        """
        Return a list of transaction hashes that matched the filter. These txs
        have been validated against the merkle tree structure and are definitely
        in the block. However, the block hash still needs to be checked against
        the best chain in the block database.

        NOTE: consumes self.vFlags/self.vHashes via pop(), so it can only be
        called once per deserialized block.
        """
        # TODO: perform a number of checks to make sure everything is formatted properly
        def getTreeWidth(transaction_count, height):
            # Number of nodes at the given height of the merkle tree.
            return (transaction_count + (1 << height) - 1) >> height
        matched_hashes = []
        def recursive_extract_hashes(height, pos):
            # Depth-first traversal of the partial merkle tree: a cleared flag
            # (or a leaf) means the next vHashes entry covers this subtree.
            parent_of_match = bool(self.vFlags.pop(0))
            if height == 0 or not parent_of_match:
                hash = self.vHashes.pop(0)
                if height == 0 and parent_of_match:
                    matched_hashes.append(hash)
                return hash
            else:
                left = recursive_extract_hashes(height - 1, pos * 2)
                if pos * 2 + 1 < getTreeWidth(self.nTX, height-1):
                    right = recursive_extract_hashes(height - 1, pos * 2 + 1)
                    if left == right:
                        # Identical siblings indicate a malformed/malleated tree.
                        raise Exception("Invalid Merkle Tree")
                else:
                    # Odd node count at this level: the last hash pairs with itself.
                    right = left
                return sha256(sha256(left+right).digest()).digest()
        height = 0
        while getTreeWidth(self.nTX, height) > 1:
            height += 1
        calculated_root = recursive_extract_hashes(height, 0)
        # Only trust the matches if the reconstructed root equals the header's.
        if calculated_root == self.get_header().hashMerkleRoot:
            return matched_hashes
        else:
            return None

    def get_header(self):
        """Return the block header
        Returned header is a new object.
        """
        return CBlockHeader(nVersion=self.nVersion,
                            hashPrevBlock=self.hashPrevBlock,
                            hashMerkleRoot=self.hashMerkleRoot,
                            nTime=self.nTime,
                            nBits=self.nBits,
                            nNonce=self.nNonce)

    def GetHash(self):
        """Return the block hash
        Note that this is the hash of the header, not the entire serialized
        block.
        """
        try:
            return self._cached_GetHash
        except AttributeError:
            # Cache on first use; __setattr__ is blocked by the immutable base.
            _cached_GetHash = self.get_header().GetHash()
            object.__setattr__(self, '_cached_GetHash', _cached_GetHash)
            return _cached_GetHash
class msg_merkleblock(MsgSerializable):
    """
    The MerkleBlock network message
    """
    command = b"merkleblock"

    def __init__(self, protover=PROTO_VERSION):
        """Start with an empty CMerkleBlock payload."""
        super(msg_merkleblock, self).__init__(protover)
        self.block = CMerkleBlock()

    @classmethod
    def msg_deser(cls, f, protover=PROTO_VERSION):
        """Build a message whose block is deserialized from stream `f`."""
        msg = cls()
        msg.block = CMerkleBlock.stream_deserialize(f)
        return msg

    def msg_ser(self, f):
        """Serialization is delegated entirely to the wrapped block."""
        self.block.stream_serialize(f)

    def __repr__(self):
        return "msg_merkleblock(header=%s)" % (repr(self.block.get_header()))
| {
"repo_name": "cpacia/pybitcoin",
"path": "pybitcoin/extensions.py",
"copies": "1",
"size": "10303",
"license": "mit",
"hash": -9081419640173929000,
"line_mean": 34.2842465753,
"line_max": 168,
"alpha_frac": 0.5954576337,
"autogenerated": false,
"ratio": 3.6380649717514126,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47335226054514123,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
from bitcoin import *
from subspace.pyelliptic import *
from subspace import payload
from subspace.utils import digest
from pyelliptic.hash import hmac_sha256
class MessageEncoder(object):
    """
    Encrypts a message to a recipient's public key, splitting it into
    fixed-size (500 byte) HMAC-signed chunks, and derives a storage key for
    each chunk that falls near the recipient's key space.
    """
    def __init__(self, recipient_pub, sender_priv, message, range):
        """
        Args:
            recipient_pub: a hex encoded compressed public key
            sender_priv: a hex encoded private key
            message: the message as string
            range: the range the recipient's public key would fall within.
        """
        pub = decode_pubkey(recipient_pub, formt='hex_compressed')
        pubkey_hex = encode_pubkey(pub, formt="hex")
        pubkey_raw = changebase(pubkey_hex[2:],16,256,minlen=64)
        self.pubkey_hex = recipient_pub
        # Repack into the curve-id/length-prefixed raw layout pyelliptic's ECC expects.
        self.pubkey = '\x02\xca\x00 '+pubkey_raw[:32]+'\x00 '+pubkey_raw[32:]
        self.privkey_hex = sender_priv
        self.privkey = encode_privkey(sender_priv, "bin")
        self.message = message
        self.length = 0  # NOTE(review): never read after initialization
        self.range = range
        pubkey = privkey_to_pubkey(sender_priv)
        pubkey_raw = changebase(pubkey[2:],16,256,minlen=64)
        pubkey = '\x02\xca\x00 '+pubkey_raw[:32]+'\x00 '+pubkey_raw[32:]
        self.alice = ECC(curve="secp256k1", raw_privkey=self.privkey, pubkey=pubkey)
        self.bob = ECC(curve='secp256k1', pubkey=self.pubkey)
        # ECDH shared secret (truncated to 32 bytes) used as the HMAC key.
        self.shared_secret = self.alice.get_ecdh_key(self.pubkey)[:32]

    def split_and_encrypt(self):
        """Split self.message into 500-byte protobuf chunks; HMAC and encrypt each.

        Returns a list of ciphertexts in sequence order. Note: consumes
        self.message as it goes.
        """
        messages = []
        data = payload.MessageData()
        data.messageID = digest(os.urandom(32))  # random id shared by all chunks
        data.sequence = 0
        data.senderKey = encode_pubkey(privkey_to_pubkey(self.privkey_hex), "hex_compressed")
        data.timeStamp = int(time.time())
        data.unencryptedMessage = self.message
        def pad():
            # Pad the serialized chunk up to exactly 500 bytes, then sign it.
            pad_len = 500 - len(data.SerializeToString())
            rand_pad = os.urandom(pad_len)
            data.pad = rand_pad
            # Adding the pad field itself grows the encoding; trim the excess.
            excess = len(data.SerializeToString()) - 500
            data.pad = rand_pad[excess:]
            sign_message(data.SerializeToString())
        def sign_message(serialized_message):
            # HMAC with the ECDH secret, wrap in a SignedPayload, then encrypt to Bob.
            hmac = hmac_sha256(self.shared_secret, serialized_message)
            signed_payload = payload.SignedPayload()
            signed_payload.serializedMessageData = serialized_message
            signed_payload.HMac = hmac
            messages.append(self.alice.encrypt(signed_payload.SerializeToString(), self.pubkey))
        overage = 1
        while overage > 0:
            overage = len(data.SerializeToString()) - 500
            if overage < 0:
                # Final (or only) chunk: pad to the fixed size and emit it.
                pad()
            elif overage == 0:
                sign_message(data.SerializeToString())
            elif overage > 0:
                # Too big: emit a truncated chunk, keep the remainder for the next one.
                data.unencryptedMessage = self.message[:len(data.unencryptedMessage) - overage]
                sign_message(data.SerializeToString())
                self.message = self.message[len(data.unencryptedMessage):]
                data.unencryptedMessage = self.message
                data.sequence += 1
        return messages

    def create_keys(self, ciphertexts):
        """
        We do some trivial brute forcing to get the hash of the message within the same range as the
        recipient's public key
        """
        messages = {}
        for ciphertext in ciphertexts:
            entropy = os.urandom(32).encode("hex")
            nonce = 0
            if self.range == 0:
                # No target range: the first nonce is acceptable.
                nonce_hash = digest(entropy + str(0))
                message_hash = sha256(ciphertext + nonce_hash)
                key = message_hash
            else:
                # Grind nonces until the hash lands within range/2 of the
                # recipient's public key x-coordinate.
                low = long(self.pubkey_hex[2:66], 16) - self.range / 4
                high = long(self.pubkey_hex[2:66], 16) + self.range / 4
                while True:
                    nonce_hash = digest(entropy + str(nonce))
                    message_hash = sha256(ciphertext + nonce_hash)
                    long_hash = long(message_hash, 16)
                    if low < long_hash < high:
                        key = message_hash
                        break
                    nonce += 1
            # The winning nonce hash is appended so the recipient can verify the key.
            messages[key] = ciphertext + nonce_hash
        return messages

    def create_messages(self):
        """Encrypt, chunk and key the message; returns {storage_key: blob}."""
        ciphertexts = self.split_and_encrypt()
        messages = self.create_keys(ciphertexts)
        return messages
class MessageDecoder(object):
    """
    Decrypts incoming message chunks, authenticates them with an HMAC derived
    from an ECDH shared secret, and reassembles plaintexts in sequence order.
    """

    def __init__(self, private_key, messageDic):
        """
        Args:
            private_key: our hex encoded private key.
            messageDic: dict of {key_hash: (..., blob)} entries to decode.
        """
        self.messageDic = messageDic
        self.priv_bin = encode_privkey(private_key, "bin")
        pubkey = privkey_to_pubkey(private_key)
        pubkey_raw = changebase(pubkey[2:],16,256,minlen=64)
        # Repack into the curve-id/length-prefixed raw layout pyelliptic expects.
        pubkey = '\x02\xca\x00 '+pubkey_raw[:32]+'\x00 '+pubkey_raw[32:]
        self.bob = ECC(curve="secp256k1", raw_privkey=self.priv_bin, pubkey=pubkey)

    def get_messages(self):
        """Return a list of dicts with "sender", "timestamp" and "plaintext" keys."""
        # First try to decrypt all the messages
        messages = {}
        for k, v in self.messageDic.items():
            # Don't bother attempting to decrypt if the hash doesn't match
            if binascii.unhexlify(sha256(v[1])) == k:
                # NOTE(review): trailing 20 bytes appear to be the nonce hash
                # appended by MessageEncoder.create_keys -- confirm.
                ciphertext = v[1][:len(v[1]) - 20]
                try:
                    messages[k] = self.bob.decrypt(ciphertext)
                except Exception:
                    # Undecryptable entry (wrong key or corrupted data): skip it.
                    # (Was a bare `except:` with the no-op expression `None`;
                    # narrowed so SystemExit/KeyboardInterrupt still propagate.)
                    pass
        # Parse each decrypted message and validate the hmac
        spayload = payload.SignedPayload()
        grouped_messages = {}
        for signed_payload in messages.values():
            spayload.ParseFromString(signed_payload)
            data = payload.MessageData()
            data.ParseFromString(spayload.serializedMessageData)
            sender_pub = data.senderKey
            pub = decode_pubkey(sender_pub, formt='hex_compressed')
            pubkey_hex = encode_pubkey(pub, formt="hex")
            pubkey_raw = changebase(pubkey_hex[2:],16,256,minlen=64)
            pubkey = '\x02\xca\x00 '+pubkey_raw[:32]+'\x00 '+pubkey_raw[32:]
            shared_secret = self.bob.get_ecdh_key(pubkey)[:32]
            hmac = hmac_sha256(shared_secret, spayload.serializedMessageData)
            # If the hmac is valid, group the messages by message ID
            if hmac == spayload.HMac:
                if data.messageID not in grouped_messages.keys():
                    grouped_messages[data.messageID] = [data]
                else:
                    mlist = grouped_messages[data.messageID]
                    mlist.append(data)
                    grouped_messages[data.messageID] = mlist
        # Run through the grouped messages and reconstruct the plaintext in the proper order
        reconstructed_messages = []
        for message_list in grouped_messages.values():
            m = {}
            full_message = ""
            for i in range(0, len(message_list)):
                for data in message_list:
                    if data.sequence == i:
                        full_message += data.unencryptedMessage
            m["sender"] = message_list[0].senderKey
            m["timestamp"] = message_list[0].timeStamp
            m["plaintext"] = full_message
            reconstructed_messages.append(m)
        return reconstructed_messages
"repo_name": "cpacia/Subspace",
"path": "subspace/message.py",
"copies": "1",
"size": "7120",
"license": "mit",
"hash": 2535972756236579000,
"line_mean": 40.8882352941,
"line_max": 100,
"alpha_frac": 0.5755617978,
"autogenerated": false,
"ratio": 4.115606936416185,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5191168734216185,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
from log import Logger
from protos.message import Command, FOLLOW, UNFOLLOW
from twisted.internet import reactor, task
# NOTE(review): RECONNECTIONS/MALFORMATED are not referenced in this module;
# presumably reserved score categories -- confirm before removing.
RECONNECTIONS = 100
MALFORMATED = 110
# Baseline per-peer counters, copied for each newly seen peer.
SCORES = {
    FOLLOW: 0,
    UNFOLLOW: 0
}
class BanScore(object):
    """
    Tracks how many FOLLOW/UNFOLLOW messages each peer IP sends and bans
    peers that exceed the threshold. Counters decay every 30 seconds.
    """

    def __init__(self, multiplexer, ban_time=86400):
        """
        Args:
            multiplexer: protocol object used to ban/unban IPs and reach connections.
            ban_time: seconds a banned IP stays banned (default 24h).
        """
        self.multiplexer = multiplexer
        self.ban_time = ban_time
        self.peers = {}  # ip -> {message_type: count}
        self.scoring_loop = task.LoopingCall(self.adjust_scores)
        self.scoring_loop.start(30, now=False)
        self.log = Logger(system=self)

    def process_message(self, peer, message):
        """Count `message` against `peer` (an (ip, port) tuple); ban on excess."""
        if peer[0] not in self.peers:
            self.peers[peer[0]] = SCORES.copy()
        try:
            if message.command == FOLLOW:
                self.peers[peer[0]][FOLLOW] += 1
                if self.peers[peer[0]][FOLLOW] > 3:
                    self.ban(peer, FOLLOW)
            elif message.command == UNFOLLOW:
                self.peers[peer[0]][UNFOLLOW] += 1
                if self.peers[peer[0]][UNFOLLOW] > 3:
                    self.ban(peer, UNFOLLOW)
        except Exception:
            self.log.warning("Exception processing banscore")

    def ban(self, peer, message_type):
        """Ban the peer's IP, drop its connection, and schedule the unban."""
        reason = Command.Name(message_type)
        self.log.warning("Banned %s. Reason: too many %s messages." %
                         (peer[0], reason))
        self.multiplexer.ban_ip(peer[0])
        if peer in self.multiplexer:
            self.multiplexer[peer].shutdown()
        reactor.callLater(self.ban_time, self.multiplexer.remove_ip_ban, peer[0])

    def adjust_scores(self):
        """Decay every counter by one; forget peers whose counters are all zero."""
        # Iterate over snapshots: we delete peers (and rewrite counters) while
        # walking the dicts, which raises RuntimeError on Python 3 without the
        # list() copies (it only worked on Python 2 because keys() was a list).
        for peer in list(self.peers.keys()):
            remove = True
            for k, v in list(self.peers[peer].items()):
                if v > 0:
                    remove = False
                    self.peers[peer][k] = v - 1
            if remove:
                del self.peers[peer]
| {
"repo_name": "saltduck/OpenBazaar-Server",
"path": "net/dos.py",
"copies": "4",
"size": "1888",
"license": "mit",
"hash": -4127605292399395000,
"line_mean": 30.4666666667,
"line_max": 81,
"alpha_frac": 0.5561440678,
"autogenerated": false,
"ratio": 3.666019417475728,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6222163485275728,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
from log import Logger
from protos.message import Command, PING, STUN, STORE, INV, VALUES, GET_LISTINGS, FOLLOW, UNFOLLOW
from twisted.internet import reactor, task
# NOTE(review): RECONNECTIONS/MALFORMATED are not referenced in this module;
# presumably reserved score categories -- confirm before removing.
RECONNECTIONS = 100
MALFORMATED = 110
# Baseline per-peer counters, copied for each newly seen peer.
SCORES = {
    FOLLOW: 0,
    UNFOLLOW: 0
}
class BanScore(object):
    """
    Keeps a per-IP tally of FOLLOW/UNFOLLOW messages and bans peers whose
    counters exceed the allowed threshold. Counters decay every 30 seconds.
    """

    def __init__(self, multiplexer, ban_time=86400):
        self.multiplexer = multiplexer
        self.ban_time = ban_time
        self.peers = {}  # ip -> {message_type: count}
        self.log = Logger(system=self)
        # Kick off the periodic decay of all counters.
        self.scoring_loop = task.LoopingCall(self.adjust_scores)
        self.scoring_loop.start(30, now=False)

    def process_message(self, peer, message):
        """Count one message from `peer` (an (ip, port) tuple); ban on excess."""
        ip = peer[0]
        if ip not in self.peers:
            self.peers[ip] = SCORES.copy()
        try:
            command = message.command
            if command in (FOLLOW, UNFOLLOW):
                self.peers[ip][command] += 1
                if self.peers[ip][command] > 3:
                    self.ban(peer, command)
        except Exception:
            self.log.warning("Exception processing banscore")

    def ban(self, peer, message_type):
        """Ban the peer's IP, drop its live connection, schedule the unban."""
        reason = Command.Name(message_type)
        self.log.warning("Banned %s. Reason: too many %s messages." %
                         (peer[0], reason))
        self.multiplexer.ban_ip(peer[0])
        if peer in self.multiplexer:
            self.multiplexer[peer].shutdown()
        reactor.callLater(self.ban_time, self.multiplexer.remove_ip_ban, peer[0])

    def adjust_scores(self):
        """Decay each counter by one; drop peers whose counters hit zero."""
        for peer in self.peers.keys():
            scores = self.peers[peer]
            still_active = False
            for message_type, count in scores.items():
                if count > 0:
                    still_active = True
                    scores[message_type] = count - 1
            if not still_active:
                del self.peers[peer]
| {
"repo_name": "tyler-smith/OpenBazaar-Server",
"path": "net/dos.py",
"copies": "1",
"size": "1934",
"license": "mit",
"hash": -5408150130231017000,
"line_mean": 31.2333333333,
"line_max": 98,
"alpha_frac": 0.5599793175,
"autogenerated": false,
"ratio": 3.6217228464419478,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46817021639419476,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
from zope.interface import Interface, Attribute
class Multiplexer(Interface):
    """
    This interface defines the structure of the protocol class that handles creating new network connections
    and sending and receiving messages. At present this is only used by the OpenBazaarProtocol class which
    is the protocol for our UDP server. In the future if we want to add additional transports, like I2P,
    they will need to implement this interface so as to not break the rest of the code.
    """
    processors = Attribute("""A list of `MessageProcessors`""")
    testnet = Attribute("""`bool` are we using testnet""")
    vendors = Attribute("""A list `dht.node.Node` vendors""")
    ws = Attribute("""The websocket API server""")
    blockchain = Attribute("""The `LibbitcoinClient` instance""")

    def register_processor(processor):
        """
        Add a `MessageProcessor` to the processors attribute.
        """

    def unregister_processor(processor):
        """
        Remove a `MessageProcessor` from the processors list.
        """

    def set_servers(ws, blockchain):
        """
        Set the ws and blockchain attributes.
        """

    def send_message(datagram, address, relay_addr):
        """
        Send a message over the wire to the given address

        Args:
            datagram: the serialized message to send
            address: the recipient's address `tuple`
            relay_addr: a relay address `tuple` if used, otherwise None
        """

    def __getitem__(addr):
        """
        Return the `Connection` of the given address.

        Args:
            addr: Tuple of destination address (ip, port).

        Raises:
            KeyError: No connection is handling the given address.
        """
class ConnectionHandler(Interface):
    """
    A handler class for each connection.
    """
    connection = Attribute("""a `Connection` object for this handler""")
    node = Attribute("""a `dht.node.Node` object for the peer. This may be set after receiving the first message""")
    processors = Attribute("""A list of `MessageProcessors`""")

    def receive_message(datagram):
        """
        Receive a datagram over the wire and dispatch it for processing.
        """

    def check_new_connection():
        """
        Return True if this is the first time this is called, else False.
        """
class Connection(Interface):
    """
    A class representing a connection to a remote peer
    """
    handler = Attribute("""a `ConnectionHandler` object for this connection""")
    state = Attribute("""a `txrudp.connection.State` enum showing this connection's state""")

    def send_message(message):
        """
        Send the serialized message (already-encoded bytes) to the remote peer.
        """
class MessageProcessor(Interface):
    """
    This is an interface for processing messages coming off the wire. Classes that implement this interface should be
    passed into 'OpenBazaarProtocol.register_processor' which will parse new messages to determine the message type
    then route them to the correct processor.
    """
    multiplexer = Attribute("""The main `ConnectionMultiplexer` protocol.
                         We pass it in here so we can send datagrams from this class.""")

    def receive_message(datagram, sender, connection, ban_score):
        """
        Called by OpenBazaarProtocol when it receives a new message intended for this processor.

        Args:
            datagram: The protobuf that came off the wire in unserialized format. Basic validity checks, such as
                minimum size and valid protobuf format have already been done.
            sender: a `node.Node` object sent by the sender.
            connection: the txrudp connection to the peer who sent the message. To respond directly to the peer call
                connection.send_message()
            ban_score: a `net.dos.BanScore` object used to keep track of misbehaving peers. We need it here because
                the processor determines if the incoming message is a request or a response before passing it into
                the BanScore.
        """

    def connect_multiplexer(multiplexer):
        """
        Connect the main ConnectionMultiplexer to this class so we can send outgoing messages.
        """

    def __iter__():
        """
        OpenBazaarProtocol will use this to check which message types are handled by this processor.

        :return: iter([list of enums])
        """
class BroadcastListener(Interface):
    """
    An interface for handling broadcasts sent to followers.
    """

    def notify(guid, message):
        """
        New broadcasts will be sent here. They will only show if this node is following the node
        which sent the broadcast.
        """
class MessageListener(Interface):
    """
    An interface for handling messages sent between nodes.
    """

    def notify(plaintext_message, signature):
        """
        New messages will be sent here if they decrypt and parse correctly.

        Args:
            plaintext_message: the protobuf object containing the message
            signature: the signature covering the message.
        """
class NotificationListener(Interface):
    """
    An interface for handling event notifications. New events should update this
    listener which will save the notifications to the db and push it to UI via websockets.
    """

    def notify(guid, handle, type, order_id, title, image_hash):
        """
        This should be called to register a new notification.

        Args:
            guid: (in hex) optional depending on notification type.
            handle: optional depending on notification type.
            type: a `String` containing the type of notification,
                (ex: Follow, New Order, Order Confirmation, Payment Received).
            order_id: an order id if this notification is for an order
            title: a `String` which can be used for the item's title if an order notification.
            image_hash: optional depending on notification type.
        """
"repo_name": "OpenBazaar/Network",
"path": "interfaces.py",
"copies": "6",
"size": "6090",
"license": "mit",
"hash": 7559034254325670000,
"line_mean": 34.6198830409,
"line_max": 117,
"alpha_frac": 0.6466338259,
"autogenerated": false,
"ratio": 4.899436846339501,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8546070672239501,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import argparse
import json
import os.path
import platform
import socket
import stun
import sys
import time
from api.ws import WSFactory, AuthenticatedWebSocketProtocol, AuthenticatedWebSocketFactory
from api.restapi import RestAPI
from config import DATA_FOLDER, KSIZE, ALPHA, LIBBITCOIN_SERVERS,\
LIBBITCOIN_SERVERS_TESTNET, SSL_KEY, SSL_CERT, SEEDS, SEEDS_TESTNET, SSL, SERVER_VERSION
from daemon import Daemon
from db.datastore import Database
from dht.network import Server
from dht.node import Node
from dht.storage import ForgetfulStorage
from keys.credentials import get_credentials
from keys.keychain import KeyChain
from log import Logger, FileLogObserver
from market import network
from market.listeners import MessageListenerImpl, BroadcastListenerImpl, NotificationListenerImpl
from market.contracts import check_unfunded_for_payment
from market.btcprice import BtcPrice
from market.profile import Profile
from market.transactions import rebroadcast_unconfirmed
from net.heartbeat import HeartbeatFactory
from net.sslcontext import ChainedOpenSSLContextFactory
from net.upnp import PortMapper
from net.utils import looping_retry
from net.wireprotocol import OpenBazaarProtocol
from obelisk.client import LibbitcoinClient
from protos.objects import FULL_CONE, RESTRICTED, SYMMETRIC
from twisted.internet import reactor, task
from twisted.python import log, logfile
from txws import WebSocketFactory
def run(*args):
    """
    Boot the OpenBazaar server: database, heartbeat websocket, key generation,
    then (in start_server) NAT traversal, kademlia, market, REST/WS APIs and
    the blockchain client.

    Positional args (as passed by Parser.start / OpenBazaard.run):
        0 TESTNET (bool), 1 LOGLEVEL, 2 PORT, 3 ALLOWIP (list of ips),
        4 RESTPORT, 5 WSPORT, 6 HEARTBEATPORT, 7 launch timestamp
        (time.time(), used for the startup-duration log), 8 audit flag.
    """
    TESTNET = args[0]
    LOGLEVEL = args[1]
    PORT = args[2]
    ALLOWIP = args[3]
    RESTPORT = args[4]
    WSPORT = args[5]
    HEARTBEATPORT = args[6]
    # BUG FIX: the audit flag is args[8]; args[7] is the launch timestamp
    # (both call sites pass time.time() at index 7, and the startup log below
    # reads args[7] as a time). Previously AUDIT = args[7] handed a float
    # timestamp to network.Server.
    # NOTE(review): the CLI flag is --disableaudit; confirm whether
    # network.Server expects the raw flag or its negation.
    AUDIT = args[8]

    def start_server(keys, first_startup=False):
        # logging
        logFile = logfile.LogFile.fromFullPath(
            os.path.join(DATA_FOLDER, "debug.log"),
            rotateLength=15000000,
            maxRotatedFiles=1)
        log.addObserver(FileLogObserver(logFile, level=LOGLEVEL).emit)
        log.addObserver(FileLogObserver(level=LOGLEVEL).emit)
        logger = Logger(system="OpenBazaard")

        # NAT traversal
        p = PortMapper()
        p.add_port_mapping(PORT, PORT, "UDP")
        logger.info("Finding NAT Type...")
        response = looping_retry(stun.get_ip_info, "0.0.0.0", PORT)
        logger.info("%s on %s:%s" % (response[0], response[1], response[2]))
        ip_address = response[1]
        port = response[2]
        if response[0] == "Full Cone":
            nat_type = FULL_CONE
        elif response[0] == "Restric NAT":
            nat_type = RESTRICTED
        else:
            nat_type = SYMMETRIC

        def on_bootstrap_complete(resp):
            logger.info("bootstrap complete")
            # Periodic maintenance: fetch stored messages, watch unfunded
            # contracts and rebroadcast unconfirmed transactions.
            task.LoopingCall(mserver.get_messages, mlistener).start(3600)
            task.LoopingCall(check_unfunded_for_payment, db, libbitcoin_client, nlistener, TESTNET).start(600)
            task.LoopingCall(rebroadcast_unconfirmed, db, libbitcoin_client, TESTNET).start(600)

        protocol = OpenBazaarProtocol(db, (ip_address, port), nat_type, testnet=TESTNET,
                                      relaying=True if nat_type == FULL_CONE else False)

        # kademlia
        SEED_URLS = SEEDS_TESTNET if TESTNET else SEEDS
        relay_node = None
        if nat_type != FULL_CONE:
            # Behind a restrictive NAT: pick the first resolvable seed as relay.
            for seed in SEED_URLS:
                try:
                    relay_node = (socket.gethostbyname(seed[0].split(":")[0]),
                                  28469 if TESTNET else 18469)
                    break
                except socket.gaierror:
                    pass
        try:
            # Prefer resuming from the cached routing table.
            kserver = Server.loadState(os.path.join(DATA_FOLDER, 'cache.pickle'), ip_address, port, protocol, db,
                                       nat_type, relay_node, on_bootstrap_complete, storage)
        except Exception:
            # No usable cache: create a fresh node and bootstrap from the seeds.
            node = Node(keys.guid, ip_address, port, keys.verify_key.encode(),
                        relay_node, nat_type, Profile(db).get().vendor)
            protocol.relay_node = node.relay_node
            kserver = Server(node, db, keys.signing_key, KSIZE, ALPHA, storage=storage)
            kserver.protocol.connect_multiplexer(protocol)
            kserver.bootstrap(kserver.querySeed(SEED_URLS)).addCallback(on_bootstrap_complete)
        kserver.saveStateRegularly(os.path.join(DATA_FOLDER, 'cache.pickle'), 10)
        protocol.register_processor(kserver.protocol)

        # market
        mserver = network.Server(kserver, keys.signing_key, db, AUDIT)
        mserver.protocol.connect_multiplexer(protocol)
        protocol.register_processor(mserver.protocol)

        looping_retry(reactor.listenUDP, port, protocol)

        # Expose the APIs publicly only when extra allowed IPs are configured.
        interface = "0.0.0.0" if ALLOWIP != ["127.0.0.1"] else "127.0.0.1"

        # websockets api
        authenticated_sessions = []
        if interface == "127.0.0.1":
            authenticated_sessions.append("localhost")
        ws_api = WSFactory(mserver, kserver, only_ip=ALLOWIP)
        ws_factory = AuthenticatedWebSocketFactory(ws_api)
        ws_factory.authenticated_sessions = authenticated_sessions
        ws_factory.protocol = AuthenticatedWebSocketProtocol
        if SSL:
            reactor.listenSSL(WSPORT, ws_factory,
                              ChainedOpenSSLContextFactory(SSL_KEY, SSL_CERT), interface=interface)
        else:
            reactor.listenTCP(WSPORT, ws_factory, interface=interface)

        # rest api
        rest_api = RestAPI(mserver, kserver, protocol, username, password,
                           authenticated_sessions, only_ip=ALLOWIP)
        if SSL:
            reactor.listenSSL(RESTPORT, rest_api,
                              ChainedOpenSSLContextFactory(SSL_KEY, SSL_CERT), interface=interface)
        else:
            reactor.listenTCP(RESTPORT, rest_api, interface=interface)

        # blockchain
        if TESTNET:
            libbitcoin_client = LibbitcoinClient(LIBBITCOIN_SERVERS_TESTNET, log=Logger(service="LibbitcoinClient"))
        else:
            libbitcoin_client = LibbitcoinClient(LIBBITCOIN_SERVERS, log=Logger(service="LibbitcoinClient"))
        heartbeat_server.libbitcoin = libbitcoin_client

        # listeners
        nlistener = NotificationListenerImpl(ws_api, db)
        mserver.protocol.add_listener(nlistener)
        mlistener = MessageListenerImpl(ws_api, db)
        mserver.protocol.add_listener(mlistener)
        blistener = BroadcastListenerImpl(ws_api, db)
        mserver.protocol.add_listener(blistener)

        protocol.set_servers(ws_api, libbitcoin_client)

        if first_startup:
            # Hand the freshly generated credentials to the UI over the heartbeat socket.
            heartbeat_server.push(json.dumps({
                "status": "GUID generation complete",
                "username": username,
                "password": password
            }))
        heartbeat_server.set_status("online")
        logger.info("startup took %s seconds" % str(round(time.time() - args[7], 2)))

        def shutdown():
            logger.info("shutting down server")
            # Persist the known vendors before tearing the protocol down.
            for vendor in protocol.vendors.values():
                db.vendors.save_vendor(vendor.id.encode("hex"), vendor.getProto().SerializeToString())
            PortMapper().clean_my_mappings(PORT)
            protocol.shutdown()
        reactor.addSystemEventTrigger('before', 'shutdown', shutdown)

    # database
    db = Database(TESTNET)
    storage = ForgetfulStorage()

    # client authentication
    username, password = get_credentials(db)

    # heartbeat server
    interface = "0.0.0.0" if ALLOWIP != ["127.0.0.1"] else "127.0.0.1"
    heartbeat_server = HeartbeatFactory(only_ip=ALLOWIP)
    if SSL:
        reactor.listenSSL(HEARTBEATPORT, WebSocketFactory(heartbeat_server),
                          ChainedOpenSSLContextFactory(SSL_KEY, SSL_CERT), interface=interface)
    else:
        reactor.listenTCP(HEARTBEATPORT, WebSocketFactory(heartbeat_server), interface=interface)

    btcPrice = BtcPrice()
    btcPrice.start()

    # key generation (calls start_server once the keys are ready)
    KeyChain(db, start_server, heartbeat_server)

    reactor.run()
    btcPrice.closethread()
    btcPrice.join(1)
if __name__ == "__main__":
    # NOTE(review): presumably required for ssl-related fixes shipped in
    # Python 2.7.9 -- confirm the exact reason.
    if sys.version_info < (2, 7, 9):
        print "You must use python 2.7.9 or greater"
        sys.exit(0)
    # pylint: disable=anomalous-backslash-in-string
    class OpenBazaard(Daemon):
        """Daemon wrapper delegating the daemonized entry point to module-level run()."""
        def run(self, *args):
            # Invoked by Daemon.start() after forking.
            run(*args)
    class Parser(object):
        """Top-level CLI dispatcher for openbazaard.

        The first positional argument (start/stop/restart) selects the
        method of the same name on this class; each method then parses its
        own remaining arguments from sys.argv[2:].
        """

        def __init__(self, daemon):
            # daemon: the OpenBazaard Daemon instance controlled by
            # start/stop/restart.
            self.daemon = daemon
            parser = argparse.ArgumentParser(
                description='OpenBazaar-Server v0.2.4',
                usage='''
python openbazaard.py <command> [<args>]
python openbazaard.py <command> --help
commands:
    start            start the OpenBazaar server
    stop             shutdown the server and disconnect
    restart          restart the server
''')
            parser.add_argument('command', help='Execute the given command')
            # Only argv[1:2] is parsed here; sub-command flags are handled
            # by the dispatched method itself.
            args = parser.parse_args(sys.argv[1:2])
            if not hasattr(self, args.command):
                parser.print_help()
                exit(1)
            getattr(self, args.command)()

        def start(self):
            """Parse start-specific flags and launch the server, optionally
            daemonized (daemon mode only on unix-like platforms)."""
            parser = argparse.ArgumentParser(
                description="Start the OpenBazaar server",
                usage="python openbazaard.py start [<args>]"
            )
            parser.add_argument('-d', '--daemon', action='store_true',
                                help="run the server in the background as a daemon")
            parser.add_argument('-t', '--testnet', action='store_true', help="use the test network")
            parser.add_argument('-l', '--loglevel', default="info",
                                help="set the logging level [debug, info, warning, error, critical]")
            parser.add_argument('-p', '--port', help="set the network port")
            parser.add_argument('-a', '--allowip', default=["127.0.0.1"], action="append",
                                help="only allow api connections from this ip")
            parser.add_argument('-r', '--restapiport', help="set the rest api port", default=18469)
            parser.add_argument('-w', '--websocketport', help="set the websocket api port", default=18466)
            parser.add_argument('-b', '--heartbeatport', help="set the heartbeat port", default=18470)
            parser.add_argument('-u', '--disableaudit', action='store_true', help="disable event logging")
            parser.add_argument('--pidfile', help="name of the pid file", default="openbazaard.pid")
            args = parser.parse_args(sys.argv[2:])
            self.print_splash_screen()
            unix = ("linux", "linux2", "darwin")
            if args.port:
                port = int(args.port)
            else:
                # Default ports: 18467 mainnet, 28467 testnet.
                port = 18467 if not args.testnet else 28467
            if args.daemon and platform.system().lower() in unix:
                self.daemon.pidfile = "/tmp/" + args.pidfile
                self.daemon.start(args.testnet, args.loglevel, port, args.allowip,
                                  int(args.restapiport), int(args.websocketport),
                                  int(args.heartbeatport), time.time(), args.disableaudit)
            else:
                # Foreground mode (also the fallback on non-unix platforms).
                run(args.testnet, args.loglevel, port, args.allowip,
                    int(args.restapiport), int(args.websocketport),
                    int(args.heartbeatport), time.time(), args.disableaudit)

        def stop(self):
            """Ask the running daemon to shut down."""
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Shutdown the server and disconnect",
                usage='''usage:
python openbazaard.py stop''')
            args = parser.parse_args(sys.argv[2:])
            print "OpenBazaar server stopping..."
            self.daemon.stop()

        def restart(self):
            """Restart the daemon process."""
            # pylint: disable=W0612
            parser = argparse.ArgumentParser(
                description="Restart the server",
                usage='''usage:
python openbazaard.py restart''')
            parser.parse_args(sys.argv[2:])
            print "Restarting OpenBazaar server..."
            self.daemon.restart()

        @staticmethod
        def print_splash_screen():
            # ANSI escape codes used to color the ASCII-art banner.
            OKBLUE = '\033[94m'
            ENDC = '\033[0m'
            print "________ " + OKBLUE + " __________" + ENDC
            print "\_____ \ ______ ____ ____" + OKBLUE + \
                "\______ \_____ _____________ _____ _______" + ENDC
            print " / | \\\____ \_/ __ \ / \\" + OKBLUE +\
                "| | _/\__ \ \___ /\__ \ \__ \\\_ __ \ " + ENDC
            print "/ | \ |_> > ___/| | \ " + OKBLUE \
                + "| \ / __ \_/ / / __ \_/ __ \| | \/" + ENDC
            print "\_______ / __/ \___ >___| /" + OKBLUE + "______ /(____ /_____ \(____ (____ /__|" + ENDC
            print " \/|__| \/ \/ " + OKBLUE + " \/ \/ \/ \/ \/" + ENDC
            print
            print "OpenBazaar Server " + SERVER_VERSION + " starting..."
Parser(OpenBazaard('/tmp/openbazaard.pid'))
| {
"repo_name": "cpacia/OpenBazaar-Server",
"path": "openbazaard.py",
"copies": "1",
"size": "13086",
"license": "mit",
"hash": 4007209120806503000,
"line_mean": 40.4113924051,
"line_max": 116,
"alpha_frac": 0.587421672,
"autogenerated": false,
"ratio": 3.938007824255191,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0027477794302742395,
"num_lines": 316
} |
__author__ = 'chris'
import argparse
import string
import pickle
import json
from twisted.internet import reactor
from txjsonrpc.netstring.jsonrpc import Proxy
from os.path import expanduser
from bitcoin import *
# Data directory for subspace state; keys.pickle holds the node's keys.
datafolder = expanduser("~") + "/.subspace/"
if os.path.isfile(datafolder + 'keys.pickle'):
    # NOTE(review): `os` and `sys` are not imported explicitly in this module —
    # presumably pulled in by `from bitcoin import *`; confirm. Also `privkey`
    # is loaded here but never referenced again in this script.
    privkey = pickle.load(open(datafolder + "keys.pickle", "rb"))
def doContinue(value):
    """Deferred callback that deliberately ignores its result argument."""
    return None
def printValue(value):
    # Success callback: pretty-print the RPC result, then stop the reactor
    # so the CLI process can exit.
    print json.dumps(value, indent=4)
    reactor.stop()
def printError(error):
    # Failure callback: report the error, then stop the reactor so the
    # CLI process can exit.
    print 'error', error
    reactor.stop()
class Parser(object):
def __init__(self, proxy):
parser = argparse.ArgumentParser(
description='Subspace v0.2',
usage='''
subspace <command> [<args>]
subspace <command> --help
commands:
getinfo returns an object containing various state info
getmessages returns a list of your messages in json format
getnew returns messages that have not been previously returned by this command
getprivkey returns your private encryption key
getpubkey returns your node's public encryption key
send sends a message to the given public key
start start the subspace daemon
stop close subspace and disconnect
''')
parser.add_argument('command', help='Execute the given command')
parser.add_argument('-n', '--noisy', action='store_true', help="show log output")
args = parser.parse_args(sys.argv[1:2])
if not hasattr(self, args.command):
parser.print_help()
exit(1)
getattr(self, args.command)()
self.proxy = proxy
def send(self):
parser = argparse.ArgumentParser(
description="Send a message to the recipient's public key",
usage='''usage:
subspace send [-k PUBLIC KEY] [-m MESSAGE]''')
parser.add_argument('-k', '--key', required=True, help="recipient's public key")
parser.add_argument('-m', '--message', required=True,
help="the unencrypted message to send (will be encrypted)",
nargs='+')
parser.add_argument('-d', '--dontstore', action='store_true', help="sends to an online recipient without storing on the network")
args = parser.parse_args(sys.argv[2:])
key = args.key
if len(args.key) != 66 or all(c in string.hexdigits for c in args.key) is not True:
try:
key = b58check_to_hex(args.key)
except:
print "Invalid key. Enter a 33 byte public key in either hexadecimal for base58check format."
return
if args.dontstore:
d = proxy.callRemote('send', key, args.message, False)
else:
d = proxy.callRemote('send', key, args.message)
d.addCallbacks(printValue, printError)
reactor.run()
def getmessages(self):
parser = argparse.ArgumentParser(
description='Returns a list of your messages in json format',
usage='''usage:
subspace getmessages''')
args = parser.parse_args(sys.argv[2:])
d = proxy.callRemote('getmessages')
d.addCallbacks(printValue, printError)
reactor.run()
def getprivkey(self):
def printKey(key):
if args.base58:
print encode_privkey(key, "wif")
else:
print key
reactor.stop()
parser = argparse.ArgumentParser(
description="Returns your private encryption key",
usage='''usage:
subspace getprivkey''')
parser.add_argument('-b', '--base58', action='store_true', help="returns the key in base58check format")
args = parser.parse_args(sys.argv[2:])
d = proxy.callRemote('getprivkey')
d.addCallbacks(printKey, printError)
reactor.run()
def getpubkey(self):
def printKey(key):
if args.base58:
print hex_to_b58check(key, 0)
else:
print key
reactor.stop()
parser = argparse.ArgumentParser(
description="Returns your node's public encryption key",
usage='''usage:
subspace getpubkey''')
parser.add_argument('-b', '--base58', action='store_true', help="returns the key in base58check format")
args = parser.parse_args(sys.argv[2:])
d = proxy.callRemote('getpubkey')
d.addCallbacks(printKey, printError)
reactor.run()
def getnew(self):
parser = argparse.ArgumentParser(
description="Returns messages that have not previously been returned by this command",
usage='''usage:
subspace getnew''')
args = parser.parse_args(sys.argv[2:])
d = proxy.callRemote('getnew')
d.addCallbacks(printValue, printError)
reactor.run()
def getinfo(self):
parser = argparse.ArgumentParser(
description="Returns an object containing various state info",
usage='''usage:
subspace getinfo''')
args = parser.parse_args(sys.argv[2:])
d = proxy.callRemote('getinfo')
d.addCallbacks(printValue, printError)
reactor.run()
# JSON-RPC connection to the local subspace daemon, then dispatch the
# command given on the command line.
proxy = Proxy('127.0.0.1', 8336)
Parser(proxy)
| {
"repo_name": "cpacia/Subspace",
"path": "subspace-cli.py",
"copies": "1",
"size": "5318",
"license": "mit",
"hash": 121175146858364980,
"line_mean": 35.4246575342,
"line_max": 137,
"alpha_frac": 0.6032342986,
"autogenerated": false,
"ratio": 4.240829346092504,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5344063644692504,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import ast
import json
import os
import time
from binascii import unhexlify
from random import shuffle
import nacl.encoding
import nacl.signing
from twisted.internet.protocol import Protocol, Factory, connectionDone
from txws import WebSocketProtocol, WebSocketFactory
from api.utils import smart_unicode, sanitize_html
from config import DATA_FOLDER, str_to_bool
from dht.node import Node
from keys.keychain import KeyChain
from log import Logger
from market.profile import Profile
from protos import objects
from protos.countries import CountryCode
from protos.objects import PlaintextMessage, Value, Listings
# HTML tag whitelist for user-supplied content echoed over the websocket —
# presumably consumed by sanitize_html (defined in api.utils); confirm there.
ALLOWED_TAGS = ('h2', 'h3', 'h4', 'h5', 'h6', 'p', 'a', 'u', 'ul', 'ol', 'nl', 'li', 'b', 'i', 'strong',
                'em', 'strike', 'hr', 'br', 'img', 'blockquote', 'span')

# pylint: disable=W0232
class WSProtocol(Protocol):
    """
    Handles new incoming requests coming from a websocket.

    Each request is a JSON object {"request": {"id": ..., "command": ..., ...}};
    responses are written back asynchronously as JSON as DHT/peer callbacks
    resolve, tagged with the originating request id.
    """

    def __init__(self):
        self.log = Logger(system=self)

    def connectionMade(self):
        # Register with the factory so server-side pushes reach this client.
        self.factory.register(self)

    def connectionLost(self, reason=connectionDone):
        self.factory.unregister(self)

    def get_vendors(self, message_id):
        """Stream profile metadata for up to 30 known vendors to the client,
        skipping vendors already sent for this message_id."""
        if message_id in self.factory.outstanding_vendors:
            queried = self.factory.outstanding_vendors[message_id]
        else:
            queried = []
            # NOTE(review): resetting the whole dict drops state for every
            # other outstanding message_id — looks intentional (only the
            # newest query is tracked), but confirm.
            self.factory.outstanding_vendors = {}
            self.factory.outstanding_vendors[message_id] = queried
        vendors = self.factory.mserver.protocol.multiplexer.vendors.values()
        shuffle(vendors)
        to_query = []
        for vendor in vendors:
            if vendor.id not in queried:
                to_query.append(vendor)

        def handle_response(metadata, node):
            # Callback for get_user_metadata: push the vendor profile, or
            # purge the vendor from multiplexer + db if it didn't respond.
            to_query.remove(node)
            if metadata is not None:
                vendor = {
                    "id": message_id,
                    "vendor":
                        {
                            "guid": node.id.encode("hex"),
                            "name": metadata.name,
                            "short_description": metadata.short_description,
                            "handle": metadata.handle,
                            "avatar_hash": metadata.avatar_hash.encode("hex"),
                            "nsfw": metadata.nsfw
                        }
                }
                self.transport.write(json.dumps(sanitize_html(vendor), indent=4))
                queried.append(node.id)
                return True
            else:
                if node.id in self.factory.mserver.protocol.multiplexer.vendors:
                    del self.factory.mserver.protocol.multiplexer.vendors[node.id]
                self.factory.db.vendors.delete_vendor(node.id.encode("hex"))
                return False

        for node in to_query[:30]:
            self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node)

    def get_moderators(self, message_id):
        """Resolve the "moderators" DHT keyword and stream each moderator's
        profile to the client as it arrives, refreshing the local cache."""
        def parse_response(moderators):
            if moderators is not None:
                # Keep the moderators the user has selected (settings column
                # 11 — assumed to be the selected-moderator guid list; verify
                # against the settings schema), clear the rest.
                current_mods = json.loads(self.factory.db.settings.get()[11])
                self.factory.db.moderators.clear_all(except_guids=current_mods)

                def parse_profile(profile, node):
                    if profile is not None:
                        # TODO: should check signatures here before entering in database
                        self.factory.db.moderators.save_moderator(node.id.encode("hex"), node.pubkey,
                                                                  profile.bitcoin_key.public_key,
                                                                  profile.bitcoin_key.signature, profile.name,
                                                                  profile.avatar_hash, profile.moderation_fee,
                                                                  profile.handle, profile.short_description)
                        moderator = {
                            "id": message_id,
                            "moderator":
                                {
                                    "guid": node.id.encode("hex"),
                                    "name": profile.name,
                                    "handle": profile.handle,
                                    "short_description": profile.short_description,
                                    "avatar_hash": profile.avatar_hash.encode("hex"),
                                    "about": profile.about,
                                    "fee": profile.moderation_fee
                                }
                        }
                        self.transport.write(json.dumps(sanitize_html(moderator), indent=4))
                    else:
                        self.factory.db.moderators.delete_moderator(node.id)

                for mod in moderators:
                    try:
                        val = objects.Value()
                        val.ParseFromString(mod)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.nodeAddress.ip, n.nodeAddress.port, n.publicKey,
                                           None if not n.HasField("relayAddress") else
                                           (n.relayAddress.ip, n.relayAddress.port),
                                           n.natType, n.vendor)
                        if n.guid == KeyChain(self.factory.db).guid:
                            # It's us: answer from the local profile directly.
                            parse_profile(Profile(self.factory.db).get(), node_to_ask)
                        else:
                            self.factory.mserver.get_profile(node_to_ask)\
                                .addCallback(parse_profile, node_to_ask)
                    except Exception:
                        # Malformed DHT values are skipped silently.
                        pass
        self.factory.kserver.get("moderators").addCallback(parse_response)

    def get_homepage_listings(self, message_id, only_following=False):
        """Stream up to 3 listings per vendor to the client, querying one
        random vendor immediately and then up to 15 more.

        Args:
            message_id: request id echoed back in every response.
            only_following: restrict queried vendors to followed users.
        """
        if message_id not in self.factory.outstanding_listings:
            self.factory.outstanding_listings = {}
            self.factory.outstanding_listings[message_id] = []
        vendors = dict(self.factory.mserver.protocol.multiplexer.vendors)
        self.log.info("Fetching listings from %s vendors" % len(vendors))

        def get_following_from_vendors(vendors):
            # Filter the vendor dict down to the guids this node follows.
            follow_data = self.factory.mserver.db.follow.get_following()
            following_guids = []
            if follow_data is not None:
                f = objects.Following()
                f.ParseFromString(follow_data)
                for user in f.users:
                    following_guids.append(user.guid)
            vendor_list = []
            for k, v in vendors.items():
                if k in following_guids:
                    vendor_list.append(v)
            return vendor_list

        def handle_response(listings, node):
            # Push at most 3 not-yet-sent listings from this vendor; on a
            # null response, drop the vendor from the multiplexer and db.
            count = 0
            if listings is not None:
                for l in listings.listing:
                    try:
                        if l.contract_hash not in self.factory.outstanding_listings[message_id]:
                            listing_json = {
                                "id": message_id,
                                "listing":
                                    {
                                        "guid": node.id.encode("hex"),
                                        "handle": listings.handle,
                                        "avatar_hash": listings.avatar_hash.encode("hex"),
                                        "title": l.title,
                                        "contract_hash": l.contract_hash.encode("hex"),
                                        "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                                        "category": l.category,
                                        "price": l.price,
                                        "currency_code": l.currency_code,
                                        "nsfw": l.nsfw,
                                        "origin": str(CountryCode.Name(l.origin)),
                                        "ships_to": []
                                    }
                            }
                            if l.contract_type != 0:
                                # NOTE(review): this key is set on the outer
                                # object, not under "listing" like the other
                                # fields — looks inconsistent; confirm the UI
                                # reads it from here.
                                listing_json["contract_type"] = str(Listings.ContractType.Name(l.contract_type))
                            for country in l.ships_to:
                                listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                            # Fetch any images we don't already have cached.
                            if not os.path.isfile(os.path.join( \
                                    DATA_FOLDER, 'cache', l.thumbnail_hash.encode("hex"))):
                                self.factory.mserver.get_image(node, l.thumbnail_hash)
                            if not os.path.isfile(os.path.join( \
                                    DATA_FOLDER, 'cache', listings.avatar_hash.encode("hex"))):
                                self.factory.mserver.get_image(node, listings.avatar_hash)
                            self.transport.write(json.dumps(sanitize_html(listing_json), indent=4))
                            count += 1
                            self.factory.outstanding_listings[message_id].append(l.contract_hash)
                        if count == 3:
                            break
                    except Exception:
                        pass
                if node.id in vendors:
                    del vendors[node.id]
            else:
                if node.id in vendors:
                    del vendors[node.id]
                if node.id in self.factory.mserver.protocol.multiplexer.vendors:
                    del self.factory.mserver.protocol.multiplexer.vendors[node.id]
                self.factory.db.vendors.delete_vendor(node.id.encode("hex"))

        # First pass: query one random vendor immediately.
        if only_following:
            vendor_list = get_following_from_vendors(vendors)
        else:
            vendor_list = vendors.values()
        if len(vendor_list) > 0:
            shuffle(vendor_list)
            node_to_ask = vendor_list[0]
            if node_to_ask is not None:
                self.factory.mserver.get_listings(node_to_ask).addCallback(handle_response, node_to_ask)
        # Second pass: fan out to up to 15 vendors.
        if only_following:
            vendor_list = get_following_from_vendors(vendors)
        else:
            vendor_list = vendors.values()
        shuffle(vendor_list)
        for vendor in vendor_list[:15]:
            self.factory.mserver.get_listings(vendor).addCallback(handle_response, vendor)

    def send_message(self, message_id, guid, handle, message, subject, message_type, recipient_key):
        """Persist an outgoing chat message and deliver it, storing on the
        DHT when the recipient cannot be resolved directly."""
        # Derive the curve25519 encryption key from the recipient's ed25519
        # signing key.
        enc_key = nacl.signing.VerifyKey(unhexlify(recipient_key)).to_curve25519_public_key().encode()
        self.factory.db.messages.save_message(guid, handle, unhexlify(recipient_key), subject,
                                              message_type.upper(), message, time.time(), "", "", True,
                                              message_id)

        def send(node_to_send):
            n = node_to_send if node_to_send is not None else Node(unhexlify(guid))
            self.factory.mserver.send_message(n, enc_key,
                                              PlaintextMessage.Type.Value(message_type.upper()),
                                              message, subject,
                                              store_only=True if node_to_send is None else False)
        self.factory.kserver.resolve(unhexlify(guid)).addCallback(send)

    def search(self, message_id, keyword):
        """Look up a keyword on the DHT and stream the matching listing
        metadata back to the client."""
        def respond(l, node):
            if l is not None:
                listing_json = {
                    "id": message_id,
                    "listing":
                        {
                            "guid": node.id.encode("hex"),
                            "title": l.title,
                            "contract_hash": l.contract_hash.encode("hex"),
                            "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                            "category": l.category,
                            "price": l.price,
                            "currency_code": l.currency_code,
                            "nsfw": l.nsfw,
                            "origin": str(CountryCode.Name(l.origin)),
                            "ships_to": [],
                            "avatar_hash": l.avatar_hash.encode("hex"),
                            "handle": l.handle
                        }
                }
                for country in l.ships_to:
                    listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                self.transport.write(json.dumps(sanitize_html(listing_json), indent=4))

        def parse_results(values):
            if values is not None:
                for v in values:
                    try:
                        val = Value()
                        val.ParseFromString(v)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.nodeAddress.ip, n.nodeAddress.port, n.publicKey,
                                           None if not n.HasField("relayAddress") else
                                           (n.relayAddress.ip, n.relayAddress.port),
                                           n.natType, n.vendor)
                        if n.guid == KeyChain(self.factory.db).guid:
                            # Our own listing: answer from the local store.
                            proto = self.factory.db.listings.get_proto()
                            l = Listings()
                            l.ParseFromString(proto)
                            for listing in l.listing:
                                if listing.contract_hash == val.valueKey:
                                    respond(listing, node_to_ask)
                        else:
                            self.factory.mserver.get_contract_metadata(node_to_ask, val.valueKey)\
                                .addCallback(respond, node_to_ask)
                    except Exception:
                        pass
        self.factory.kserver.get(keyword.lower()).addCallback(parse_results)

    def dataReceived(self, payload):
        """Parse an incoming JSON request and dispatch on request.command."""
        try:
            request_json = json.loads(payload)
            if isinstance(request_json, unicode):
                # Payload arrived double-encoded: unwrap and parse again.
                payload = ast.literal_eval(payload)
                request_json = json.loads(payload)
            message_id = str(request_json["request"]["id"])
            if request_json["request"]["command"] == "get_vendors":
                self.get_vendors(message_id)
            # NOTE(review): plain `if` (not elif) below looks like an
            # oversight; behavior is unaffected since commands are distinct.
            if request_json["request"]["command"] == "get_moderators":
                self.get_moderators(message_id)
            elif request_json["request"]["command"] == "get_homepage_listings":
                self.get_homepage_listings(message_id,
                                           str_to_bool(request_json["request"]["only_following"])
                                           if "only_following" in request_json["request"] else False)
            elif request_json["request"]["command"] == "search":
                self.search(message_id, request_json["request"]["keyword"].lower())
            elif request_json["request"]["command"] == "send_message":
                self.send_message(message_id, request_json["request"]["guid"],
                                  request_json["request"]["handle"],
                                  smart_unicode(request_json["request"]["message"]),
                                  request_json["request"]["subject"],
                                  request_json["request"]["message_type"],
                                  request_json["request"]["public_key"])
        except Exception as e:
            print 'Exception occurred: %s' % e
class WSFactory(Factory):
    """Twisted protocol factory for the websocket API.

    Tracks connected clients so server-side events can be pushed to all of
    them, and restricts connections to a whitelist of source IPs.
    """

    def __init__(self, mserver, kserver, only_ip=None):
        """
        Args:
            mserver: the market Server instance (also supplies the db).
            kserver: the kademlia server used for DHT lookups.
            only_ip: list of IPs allowed to connect; defaults to localhost
                only. If the list contains "0.0.0.0" the check is disabled.
        """
        if only_ip is None:  # idiomatic identity test (was `== None`)
            only_ip = ["127.0.0.1"]
        self.mserver = mserver
        self.kserver = kserver
        self.db = mserver.db
        self.outstanding_listings = {}
        self.outstanding_vendors = {}
        self.protocol = WSProtocol
        self.only_ip = only_ip
        self.clients = []

    def buildProtocol(self, addr):
        # Refuse (return None) for non-whitelisted hosts unless the
        # whitelist contains the "0.0.0.0" wildcard.
        if addr.host not in self.only_ip and "0.0.0.0" not in self.only_ip:
            return None
        return Factory.buildProtocol(self, addr)

    def register(self, client):
        """Track a newly connected websocket client."""
        if client not in self.clients:
            self.clients.append(client)

    def unregister(self, client):
        """Stop tracking a disconnected client."""
        if client in self.clients:
            self.clients.remove(client)

    def push(self, msg):
        """Broadcast msg to every connected websocket client."""
        for c in self.clients:
            c.transport.write(msg)
class AuthenticatedWebSocketProtocol(WebSocketProtocol):
    """WebSocket handshake protocol that only accepts clients presenting a
    TWISTED_SESSION cookie matching an authenticated session on the
    factory."""

    def validateHeaders(self):
        # Accept the upgrade only if the Cookie header carries a known
        # authenticated session id; otherwise reject the handshake.
        if "Cookie" in self.headers:
            for session in self.factory.authenticated_sessions:
                if "TWISTED_SESSION=" + session.uid in self.headers["Cookie"]:
                    return WebSocketProtocol.validateHeaders(self)
        return False
class AuthenticatedWebSocketFactory(WebSocketFactory):
    """WebSocketFactory whose handshakes are gated by session cookies."""
    # Populated externally with the REST API's authenticated session list;
    # read by validateHeaders on the protocol above.
    authenticated_sessions = None
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "api/ws.py",
"copies": "3",
"size": "17244",
"license": "mit",
"hash": 8093849418332083000,
"line_mean": 45.4797843666,
"line_max": 112,
"alpha_frac": 0.4838204593,
"autogenerated": false,
"ratio": 4.689692684253467,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0019274892049868139,
"num_lines": 371
} |
__author__ = 'chris'
import ast
import json
import os
import time
from constants import DATA_FOLDER, SEED
from market.profile import Profile
from keyutils.keys import KeyChain
from random import shuffle
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from protos.countries import CountryCode
from protos.objects import Plaintext_Message, Value, Listings
from protos import objects
from binascii import unhexlify
from dht.node import Node
class WSProtocol(WebSocketServerProtocol):
    """
    Handles new incoming requests coming from a websocket.

    Requests are JSON objects {"request": {"id": ..., "command": ..., ...}};
    responses are sent back asynchronously as DHT/peer callbacks resolve.
    """

    def onOpen(self):
        # Register with the factory so broadcasts reach this client.
        self.factory.register(self)

    def get_vendors(self, message_id):
        """Stream profile metadata for up to 30 stored vendors to the
        client, skipping vendors already sent for this message_id."""
        if message_id in self.factory.outstanding_vendors:
            queried = self.factory.outstanding_vendors[message_id]
        else:
            queried = []
            # NOTE(review): resetting the dict drops state for all other
            # message_ids — only the newest query is tracked; confirm.
            self.factory.outstanding_vendors = {}
            self.factory.outstanding_vendors[message_id] = queried
        vendors = self.factory.db.VendorStore().get_vendors()
        shuffle(vendors)
        to_query = []
        for vendor in vendors:
            if vendor.id not in queried:
                to_query.append(vendor)

        def handle_response(metadata, node):
            to_query.remove(node)
            if len(to_query) == 0:
                # Exhausted local vendors: ask the HTTP seed for more.
                self.factory.mserver.querySeed(SEED)
            if metadata is not None:
                vendor = {
                    "id": message_id,
                    "vendor":
                        {
                            "guid": node.id.encode("hex"),
                            "name": metadata.name,
                            "short_description": metadata.short_description,
                            "handle": metadata.handle,
                            "avatar_hash": metadata.avatar_hash.encode("hex"),
                            "nsfw": metadata.nsfw
                        }
                }
                self.sendMessage(json.dumps(vendor, indent=4), False)
                queried.append(node.id)
                return True
            else:
                # Unresponsive vendor: purge it from the local store.
                self.factory.db.VendorStore().delete_vendor(node.id.encode("hex"))
                return False

        for node in to_query[:30]:
            self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node)

    def get_moderators(self, message_id):
        """Resolve the "moderators" DHT keyword and stream each moderator's
        profile to the client, refreshing the local moderator cache."""
        m = self.factory.db.ModeratorStore()

        def parse_response(moderators):
            if moderators is not None:
                m.clear_all()

                def parse_profile(profile, node):
                    if profile is not None:
                        m.save_moderator(node.id.encode("hex"), node.signed_pubkey,
                                         profile.encryption_key.public_key,
                                         profile.encryption_key.signature, profile.bitcoin_key.public_key,
                                         profile.bitcoin_key.signature, profile.name, profile.avatar_hash,
                                         profile.moderation_fee, profile.handle, profile.short_description)
                        moderator = {
                            "id": message_id,
                            "moderator":
                                {
                                    "guid": node.id.encode("hex"),
                                    "name": profile.name,
                                    "handle": profile.handle,
                                    "short_description": profile.short_description,
                                    "avatar_hash": profile.avatar_hash.encode("hex"),
                                    "about": profile.about,
                                    "fee": profile.moderation_fee
                                }
                        }
                        self.sendMessage(json.dumps(moderator, indent=4), False)
                    else:
                        m.delete_moderator(node.id)

                for mod in moderators:
                    try:
                        val = objects.Value()
                        val.ParseFromString(mod)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.ip, n.port, n.signedPublicKey)
                        if n.guid == KeyChain(self.factory.db).guid:
                            # It's us: answer from the local profile.
                            parse_profile(Profile(self.factory.db).get(), node_to_ask)
                        else:
                            self.factory.mserver.get_profile(node_to_ask)\
                                .addCallback(parse_profile, node_to_ask)
                    except Exception:
                        # Malformed DHT values are skipped silently.
                        pass
        self.factory.kserver.get("moderators").addCallback(parse_response)

    def get_homepage_listings(self, message_id):
        """Stream up to 3 not-yet-sent listings from each of up to 15
        vendors back to the client."""
        if message_id not in self.factory.outstanding_listings:
            self.factory.outstanding_listings = {}
            self.factory.outstanding_listings[message_id] = []
        vendors = self.factory.db.VendorStore().get_vendors()
        if len(vendors) == 0:
            # No vendors known: bootstrap from the HTTP seed first.
            self.factory.mserver.querySeed(SEED)
            vendors = self.factory.db.VendorStore().get_vendors()
        shuffle(vendors)

        def handle_response(listings, node):
            count = 0
            if listings is not None:
                for l in listings.listing:
                    if l.contract_hash not in self.factory.outstanding_listings[message_id]:
                        listing_json = {
                            "id": message_id,
                            "listing":
                                {
                                    "guid": node.id.encode("hex"),
                                    "handle": listings.handle,
                                    "avatar_hash": listings.avatar_hash.encode("hex"),
                                    "title": l.title,
                                    "contract_hash": l.contract_hash.encode("hex"),
                                    "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                                    "category": l.category,
                                    "price": l.price,
                                    "currency_code": l.currency_code,
                                    "nsfw": l.nsfw,
                                    "origin": str(CountryCode.Name(l.origin)),
                                    "ships_to": []
                                }
                        }
                        for country in l.ships_to:
                            listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                        # Fetch any images we don't already have cached.
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + l.thumbnail_hash.encode("hex")):
                            self.factory.mserver.get_image(node, l.thumbnail_hash)
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + listings.avatar_hash.encode("hex")):
                            self.factory.mserver.get_image(node, listings.avatar_hash)
                        self.sendMessage(json.dumps(listing_json, indent=4), False)
                        count += 1
                        self.factory.outstanding_listings[message_id].append(l.contract_hash)
                    if count == 3:
                        break
                vendors.remove(node)
            else:
                # Unresponsive vendor: purge it and stop considering it.
                self.factory.db.VendorStore().delete_vendor(node.id.encode("hex"))
                vendors.remove(node)

        for vendor in vendors[:15]:
            self.factory.mserver.get_listings(vendor).addCallback(handle_response, vendor)

    def send_message(self, guid, handle, message, subject, message_type, recipient_encryption_key):
        """Persist an outgoing message and deliver it to the resolved node
        (or a placeholder node when resolution fails)."""
        self.factory.db.MessageStore().save_message(guid, handle, "", unhexlify(recipient_encryption_key), subject,
                                                    message_type.upper(), message, time.time(), "", "", True)

        def send(node_to_send):
            # NOTE(review): when resolution fails a dummy address
            # "123.4.5.6":1234 is used — presumably so the send path still
            # runs its store logic; confirm.
            n = node_to_send if node_to_send is not None else Node(unhexlify(guid), "123.4.5.6", 1234)
            self.factory.mserver.send_message(n, recipient_encryption_key,
                                              Plaintext_Message.Type.Value(message_type.upper()),
                                              message, subject)
        self.factory.kserver.resolve(unhexlify(guid)).addCallback(send)

    def search(self, message_id, keyword):
        """Look up a keyword on the DHT and stream matching listing
        metadata back to the client."""
        def respond(l, node):
            if l is not None:
                listing_json = {
                    "id": message_id,
                    "listing":
                        {
                            "guid": node.id.encode("hex"),
                            "title": l.title,
                            "contract_hash": l.contract_hash.encode("hex"),
                            "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                            "category": l.category,
                            "price": l.price,
                            "currency_code": l.currency_code,
                            "nsfw": l.nsfw,
                            "origin": str(CountryCode.Name(l.origin)),
                            "ships_to": []
                        }
                }
                for country in l.ships_to:
                    listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                self.sendMessage(json.dumps(listing_json, indent=4), False)

        def parse_results(values):
            if values is not None:
                for v in values:
                    try:
                        val = Value()
                        val.ParseFromString(v)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.ip, n.port, n.signedPublicKey, True)
                        if n.guid == KeyChain(self.factory.db).guid:
                            # Our own listing: answer from the local store.
                            proto = self.factory.db.ListingsStore().get_proto()
                            l = Listings()
                            l.ParseFromString(proto)
                            for listing in l.listing:
                                if listing.contract_hash == val.valueKey:
                                    respond(listing, node_to_ask)
                        else:
                            self.factory.mserver.get_contract_metadata(node_to_ask, val.valueKey)\
                                .addCallback(respond, node_to_ask)
                    except Exception:
                        pass
        self.factory.kserver.get(keyword.lower()).addCallback(parse_results)

    def onMessage(self, payload, isBinary):
        """Parse an incoming JSON request and dispatch on request.command."""
        try:
            request_json = json.loads(payload)
            if isinstance(request_json, unicode):
                # Payload arrived double-encoded: unwrap and parse again.
                payload = ast.literal_eval(payload)
                request_json = json.loads(payload)
            message_id = request_json["request"]["id"]
            if request_json["request"]["command"] == "get_vendors":
                self.get_vendors(message_id)
            # NOTE(review): plain `if` (not elif) below looks like an
            # oversight; behavior is unaffected since commands are distinct.
            if request_json["request"]["command"] == "get_moderators":
                self.get_moderators(message_id)
            elif request_json["request"]["command"] == "get_homepage_listings":
                self.get_homepage_listings(message_id)
            elif request_json["request"]["command"] == "search":
                self.search(message_id, request_json["request"]["keyword"].lower())
            elif request_json["request"]["command"] == "send_message":
                self.send_message(request_json["request"]["guid"],
                                  request_json["request"]["handle"],
                                  request_json["request"]["message"],
                                  request_json["request"]["subject"],
                                  request_json["request"]["message_type"],
                                  request_json["request"]["recipient_key"])
        except Exception as e:
            print 'Exception occurred: %s' % e

    def connectionLost(self, reason):
        WebSocketServerProtocol.connectionLost(self, reason)
        self.factory.unregister(self)
class WSFactory(WebSocketServerFactory):
    """
    Simple broadcast server broadcasting any message it receives to all
    currently connected clients.
    """

    def __init__(self, url, mserver, kserver, only_ip="127.0.0.1", debug=False, debugCodePaths=False):
        WebSocketServerFactory.__init__(self, url, debug=debug, debugCodePaths=debugCodePaths)
        # Market/kademlia handles plus per-request bookkeeping state.
        self.mserver = mserver
        self.kserver = kserver
        self.db = mserver.db
        self.clients = []
        self.only_ip = only_ip
        self.outstanding_listings = {}
        self.outstanding_vendors = {}

    def register(self, client):
        """Admit a client if its source IP is allowed; otherwise drop it."""
        peer_host = client.transport.getPeer().host
        allowed = self.only_ip == "0.0.0.0" or peer_host == self.only_ip
        if not allowed:
            client.transport.loseConnection()
        elif client not in self.clients:
            self.clients.append(client)

    def unregister(self, client):
        """Forget a disconnected client."""
        if client in self.clients:
            self.clients.remove(client)

    def push(self, msg):
        """Send msg to every registered client."""
        for connected_client in self.clients:
            connected_client.sendMessage(msg)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "api/ws.py",
"copies": "1",
"size": "13268",
"license": "mit",
"hash": -941675774249212800,
"line_mean": 44.2832764505,
"line_max": 115,
"alpha_frac": 0.4884684956,
"autogenerated": false,
"ratio": 4.71499644633973,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001636482687662472,
"num_lines": 293
} |
__author__ = 'chris'
import base64
import bitcointools
import gnupg
import httplib
import json
import nacl.signing
import nacl.hash
import nacl.encoding
import nacl.utils
import obelisk
import os.path
import pickle
import time
from binascii import unhexlify
from collections import OrderedDict
from config import DATA_FOLDER, TRANSACTION_FEE
from dht.node import Node
from dht.utils import digest
from keys.bip32utils import derive_childkey
from keys.keychain import KeyChain
from log import Logger
from market.contracts import Contract
from market.moderation import process_dispute, close_dispute
from market.profile import Profile
from market.protocol import MarketProtocol
from market.transactions import BitcoinTransaction
from nacl.public import PrivateKey, PublicKey, Box
from protos import objects
from seed import peers
from twisted.internet import defer, reactor, task
class Server(object):
    def __init__(self, kserver, signing_key, database):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.

        Args:
            kserver: the running Kademlia server (supplies node + router).
            signing_key: this node's signing key, passed to the protocol.
            database: handle to the local datastore.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.db = database
        self.log = Logger(system=self)
        self.protocol = MarketProtocol(kserver.node, self.router, signing_key, database)
        # Refresh this node's listings every hour, starting immediately.
        task.LoopingCall(self.update_listings).start(3600, now=True)
    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed for known vendors and save the vendors to the db.

        Args:
            Receives a list of one or more tuples Example [(seed, pubkey)]
            seed: A `string` consisting of "ip:port" or "hostname:port"
            pubkey: The hex encoded public key to verify the signature on the response
        """
        for sp in list_seed_pubkey:
            seed, pubkey = sp
            try:
                self.log.debug("querying %s for vendors" % seed)
                c = httplib.HTTPConnection(seed)
                c.request("GET", "/?type=vendors")
                response = c.getresponse()
                self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                data = response.read()
                # The seed responds with a zlib-compressed serialized
                # PeerSeeds protobuf.
                reread_data = data.decode("zlib")
                proto = peers.PeerSeeds()
                proto.ParseFromString(reread_data)
                # Verify the seed's signature over the concatenation of all
                # serialized node entries before trusting any of them.
                verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                verify_key.verify("".join(proto.serializedNode), proto.signature)
                for peer in proto.serializedNode:
                    try:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        self.db.vendors.save_vendor(n.guid.encode("hex"), peer)
                    except Exception:
                        # Skip individual peers that fail to parse.
                        pass
            except Exception, e:
                self.log.error("failed to query seed: %s" % str(e))
def get_contract(self, node_to_ask, contract_id):
    """
    Will query the given node to fetch a contract given its hash.
    If the returned contract doesn't have the same hash, it will return None.

    After acquiring the contract it will download all the associated images if it
    does not already have them in cache.

    Args:
        node_to_ask: a `dht.node.Node` object containing an ip and port
        contract_id: a 20 byte hash in raw byte format
    """
    def get_result(result):
        try:
            if result[0]:
                contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
                # Make sure the vendor didn't return a different contract
                # than the one we asked for.
                id_in_contract = contract["vendor_offer"]["listing"]["contract_id"]
                if id_in_contract != contract_id.encode("hex"):
                    raise Exception("Contract ID doesn't match")
                # TODO: verify the guid in the contract matches this node's guid
                # Check the vendor's guid signature over the listing.
                signature = contract["vendor_offer"]["signatures"]["guid"]
                verify_obj = json.dumps(contract["vendor_offer"]["listing"], indent=4)
                verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
                verify_key.verify(verify_obj, base64.b64decode(signature))
                # Check the vendor's bitcoin-key ECDSA signature over the same payload.
                bitcoin_key = contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
                bitcoin_sig = contract["vendor_offer"]["signatures"]["bitcoin"]
                valid = bitcointools.ecdsa_raw_verify(verify_obj, bitcointools.decode_sig(bitcoin_sig),
                                                      bitcoin_key)
                if not valid:
                    raise Exception("Invalid Bitcoin signature")
                if "moderators" in contract["vendor_offer"]["listing"]:
                    for moderator in contract["vendor_offer"]["listing"]["moderators"]:
                        guid = moderator["guid"]
                        guid_key = moderator["pubkeys"]["guid"]
                        bitcoin_key = moderator["pubkeys"]["bitcoin"]["key"]
                        bitcoin_sig = base64.b64decode(moderator["pubkeys"]["bitcoin"]["signature"])
                        # The moderator guid must equal the leading 40 hex chars of
                        # sha512(guid_key) and the trailing bytes must carry a
                        # small proof-of-work (first 3 bytes of pow_hash < 0x50...).
                        h = nacl.hash.sha512(unhexlify(guid_key))
                        pow_hash = h[40:]
                        if int(pow_hash[:6], 16) >= 50 or guid != h[:40]:
                            raise Exception('Invalid GUID')
                        # The moderator's bitcoin key must be signed by its guid key.
                        verify_key = nacl.signing.VerifyKey(guid_key, encoder=nacl.encoding.HexEncoder)
                        verify_key.verify(unhexlify(bitcoin_key), bitcoin_sig)
                # TODO: should probably also validate the handle here.
                self.cache(result[1][0], id_in_contract)
                if "image_hashes" in contract["vendor_offer"]["listing"]["item"]:
                    for image_hash in contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                        self.get_image(node_to_ask, unhexlify(image_hash))
                return contract
            else:
                self.log.warning("Fetched an invalid contract from %s" % node_to_ask.id.encode("hex"))
                return None
        except Exception:
            # Any parsing or verification failure is treated as "no contract".
            return None
    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching contract %s from %s" % (contract_id.encode("hex"), node_to_ask))
    d = self.protocol.callGetContract(node_to_ask, contract_id)
    return d.addCallback(get_result)
def get_image(self, node_to_ask, image_hash):
    """
    Will query the given node to fetch an image given its hash.
    If the returned image doesn't have the same hash, it will return None.

    Args:
        node_to_ask: a `dht.node.Node` object containing an ip and port
        image_hash: a 20 byte hash in raw byte format
    """
    def handle_response(result):
        try:
            # Only accept the image if the digest of the returned bytes
            # matches the hash we asked for.
            if not result[0]:
                return None
            image_data = result[1][0]
            if digest(image_data) != image_hash:
                return None
            self.cache(image_data, digest(image_data).encode("hex"))
            return image_data
        except Exception:
            return None

    if node_to_ask.ip is None or len(image_hash) != 20:
        return defer.succeed(None)
    self.log.info("fetching image %s from %s" % (image_hash.encode("hex"), node_to_ask))
    return self.protocol.callGetImage(node_to_ask, image_hash).addCallback(handle_response)
def get_profile(self, node_to_ask):
    """
    Downloads the profile from the given node. If the images do not already
    exist in cache, it will download and cache them before returning the profile.

    Args:
        node_to_ask: a `dht.node.Node` object containing an ip and port

    Returns a deferred that fires with the `objects.Profile` protobuf, or
    None if the node is unreachable or the response fails validation.
    """
    def get_result(result):
        try:
            # The profile is signed by the node's guid key; reject anything
            # that doesn't verify.
            verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
            verify_key.verify(result[1][0], result[1][1])
            p = objects.Profile()
            p.ParseFromString(result[1][0])
            if p.pgp_key.public_key:
                gpg = gnupg.GPG()
                # BUG FIX: the protobuf field is `public_key` (as read two
                # lines above); the previous `p.pgp_key.publicKey` raised
                # AttributeError, which the broad except below swallowed,
                # discarding every profile that carried a PGP key.
                gpg.import_keys(p.pgp_key.public_key)
                # The PGP signature must verify and must cover this node's
                # guid; otherwise strip the key from the profile.
                if not gpg.verify(p.pgp_key.signature) or \
                                node_to_ask.id.encode('hex') not in p.pgp_key.signature:
                    p.ClearField("pgp_key")
            # Fetch avatar/header images if they aren't already cached.
            if not os.path.isfile(os.path.join(DATA_FOLDER, 'cache', p.avatar_hash.encode("hex"))):
                self.get_image(node_to_ask, p.avatar_hash)
            if not os.path.isfile(os.path.join(DATA_FOLDER, 'cache', p.header_hash.encode("hex"))):
                self.get_image(node_to_ask, p.header_hash)
            self.cache(result[1][0], node_to_ask.id.encode("hex") + ".profile")
            return p
        except Exception:
            return None
    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching profile from %s" % node_to_ask)
    d = self.protocol.callGetProfile(node_to_ask)
    return d.addCallback(get_result)
def get_user_metadata(self, node_to_ask):
    """
    Downloads just a small portion of the profile (containing the name, handle,
    and avatar hash). We need this for some parts of the UI where we list stores.
    Since we need fast loading we shouldn't download the full profile here.
    It will download the avatar if it isn't already in cache.
    """
    def parse_metadata(result):
        try:
            serialized, sig = result[1][0], result[1][1]
            # Reject anything not signed by the node we queried.
            nacl.signing.VerifyKey(node_to_ask.pubkey).verify(serialized, sig)
            metadata = objects.Metadata()
            metadata.ParseFromString(serialized)
            avatar_path = os.path.join(DATA_FOLDER, 'cache', metadata.avatar_hash.encode("hex"))
            if not os.path.isfile(avatar_path):
                self.get_image(node_to_ask, metadata.avatar_hash)
            return metadata
        except Exception:
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching user metadata from %s" % node_to_ask)
    return self.protocol.callGetUserMetadata(node_to_ask).addCallback(parse_metadata)
def get_listings(self, node_to_ask):
    """
    Queries a store for its list of contracts. A `objects.Listings` protobuf
    is returned containing some metadata for each contract. The individual contracts
    should be fetched with a get_contract call.
    """
    def parse_listings(result):
        try:
            serialized, sig = result[1][0], result[1][1]
            # Reject anything not signed by the node we queried.
            nacl.signing.VerifyKey(node_to_ask.pubkey).verify(serialized, sig)
            listings = objects.Listings()
            listings.ParseFromString(serialized)
            return listings
        except Exception:
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching store listings from %s" % node_to_ask)
    return self.protocol.callGetListings(node_to_ask).addCallback(parse_listings)
def get_contract_metadata(self, node_to_ask, contract_hash):
    """
    Downloads just the metadata for the contract. Useful for displaying
    search results in a list view without downloading the entire contract.
    It will download the thumbnail image if it isn't already in cache.
    """
    def parse_metadata(result):
        try:
            serialized, sig = result[1][0], result[1][1]
            # Reject anything not signed by the node we queried.
            nacl.signing.VerifyKey(node_to_ask.pubkey).verify(serialized, sig)
            metadata = objects.Listings().ListingMetadata()
            metadata.ParseFromString(serialized)
            if metadata.thumbnail_hash != "":
                thumb_path = os.path.join(DATA_FOLDER, 'cache', metadata.thumbnail_hash.encode("hex"))
                if not os.path.isfile(thumb_path):
                    self.get_image(node_to_ask, metadata.thumbnail_hash)
            return metadata
        except Exception:
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching metadata for contract %s from %s" % (contract_hash.encode("hex"), node_to_ask))
    return self.protocol.callGetContractMetadata(node_to_ask, contract_hash).addCallback(parse_metadata)
def make_moderator(self):
    """
    Set self as a moderator in the DHT.
    """
    profile_update = objects.Profile()
    btc_key = profile_update.PublicKey()
    btc_key.public_key = unhexlify(bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_pubkey))
    # Sign the bitcoin key with our guid key to prove ownership; keep only
    # the 64 byte raw signature.
    btc_key.signature = self.signing_key.sign(btc_key.public_key)[:64]
    profile_update.bitcoin_key.MergeFrom(btc_key)
    profile_update.moderator = True
    Profile(self.db).update(profile_update)
    # Publish our serialized node under the shared "moderators" key.
    serialized_node = self.kserver.node.getProto().SerializeToString()
    self.kserver.set(digest("moderators"), digest(serialized_node), serialized_node)
    self.log.info("setting self as moderator on the network")
def unmake_moderator(self):
    """
    Deletes our moderator entry from the network.
    """
    # The delete call requires a 64 byte signature over the key being removed.
    proto_digest = digest(self.kserver.node.getProto().SerializeToString())
    removal_sig = self.signing_key.sign(proto_digest)[:64]
    self.kserver.delete("moderators", proto_digest, removal_sig)
    Profile(self.db).remove_field("moderator")
    self.log.info("removing self as moderator from the network")
def follow(self, node_to_follow):
    """
    Sends a follow message to another node in the network. The node must be online
    to receive the message. The message contains a signed, serialized `Follower`
    protobuf object which the recipient will store and can send to other nodes,
    proving you are following them. The response is a signed `Metadata` protobuf
    that will store in the db.
    """
    def save_to_db(result):
        # result[1] holds [ack_string, serialized_metadata, signature].
        if result[0] and result[1][0] == "True":
            try:
                u = objects.Following.User()
                u.guid = node_to_follow.id
                u.pubkey = node_to_follow.pubkey
                m = objects.Metadata()
                m.ParseFromString(result[1][1])
                u.metadata.MergeFrom(m)
                u.signature = result[1][2]
                # The followed node signs its own metadata so we can't forge
                # its name/handle/avatar later.
                verify_key = nacl.signing.VerifyKey(node_to_follow.pubkey)
                verify_key.verify(result[1][1], result[1][2])
                self.db.follow.follow(u)
                return True
            except Exception:
                return False
        else:
            return False

    # Attach a snapshot of our own profile metadata so the recipient can
    # display who followed them without an extra lookup.
    proto = Profile(self.db).get(False)
    m = objects.Metadata()
    m.name = proto.name
    m.handle = proto.handle
    m.avatar_hash = proto.avatar_hash
    m.short_description = proto.short_description
    m.nsfw = proto.nsfw
    f = objects.Followers.Follower()
    f.guid = self.kserver.node.id
    f.following = node_to_follow.id
    f.pubkey = self.kserver.node.pubkey
    f.metadata.MergeFrom(m)
    # 64 byte raw signature over the serialized Follower proves the follow
    # request came from us.
    signature = self.signing_key.sign(f.SerializeToString())[:64]
    d = self.protocol.callFollow(node_to_follow, f.SerializeToString(), signature)
    self.log.info("sending follow request to %s" % node_to_follow)
    return d.addCallback(save_to_db)
def unfollow(self, node_to_unfollow):
    """
    Sends an unfollow message to a node and removes them from our db.
    """
    def handle_ack(result):
        try:
            # Only drop the user from our following list once the remote
            # node acknowledges the unfollow.
            if result[0] and result[1][0] == "True":
                self.db.follow.unfollow(node_to_unfollow.id)
                return True
            return False
        except Exception:
            return False

    # Sign a canonical "unfollow:<guid>" string so the remote node can
    # authenticate the request.
    signature = self.signing_key.sign("unfollow:" + node_to_unfollow.id)[:64]
    deferred_ack = self.protocol.callUnfollow(node_to_unfollow, signature)
    self.log.info("sending unfollow request to %s" % node_to_unfollow)
    return deferred_ack.addCallback(handle_ack)
def get_followers(self, node_to_ask, start=0):
    """
    Query the given node for a list of its followers. The response will be a
    `Followers` protobuf object. We will verify the signature for each follower
    to make sure that node really did follow this user.

    Args:
        node_to_ask: a `dht.node.Node` object containing an ip and port
        start: pagination offset, only honored by peers newer than protocol
            version 1.

    Returns a deferred firing with (Followers proto, count) or (None, None)
    if the response fails validation.
    """
    def get_response(response):
        # Verify the signature on the response
        f = objects.Followers()
        try:
            verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
            verify_key.verify(response[1][0], response[1][1])
            f.ParseFromString(response[1][0])
        except Exception:
            return (None, None)
        # Newer peers append a total count as a third response element.
        count = None
        if len(response[1]) > 2:
            count = response[1][2]
        # Verify the signature and guid of each follower.
        # BUG FIX: the original removed entries from f.followers while
        # iterating it, which skips the element after each removal and lets
        # unvalidated followers through. Collect invalid entries first,
        # then remove them.
        invalid = []
        for follower in f.followers:
            try:
                v_key = nacl.signing.VerifyKey(follower.pubkey)
                signature = follower.signature
                follower.ClearField("signature")
                v_key.verify(follower.SerializeToString(), signature)
                # The guid must match sha512(pubkey) and carry the network's
                # proof-of-work in the trailing hash bytes.
                h = nacl.hash.sha512(follower.pubkey)
                pow_hash = h[40:]
                if int(pow_hash[:6], 16) >= 50 or follower.guid.encode("hex") != h[:40]:
                    raise Exception('Invalid GUID')
                if follower.following != node_to_ask.id:
                    raise Exception('Invalid follower')
            except Exception:
                invalid.append(follower)
        for follower in invalid:
            f.followers.remove(follower)
        return (f, count)
    peer = (node_to_ask.ip, node_to_ask.port)
    if peer in self.protocol.multiplexer and \
            self.protocol.multiplexer[peer].handler.remote_node_version > 1:
        d = self.protocol.callGetFollowers(node_to_ask, start=start)
    else:
        d = self.protocol.callGetFollowers(node_to_ask)
    self.log.info("fetching followers from %s" % node_to_ask)
    return d.addCallback(get_response)
def get_following(self, node_to_ask):
    """
    Query the given node for a list of users it's following. The return
    is `Following` protobuf object that contains signed metadata for each
    user this node is following. The signature on the metadata is there to
    prevent this node from altering the name/handle/avatar associated with
    the guid.
    """
    def get_response(response):
        # Verify the signature on the response
        f = objects.Following()
        try:
            verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
            verify_key.verify(response[1][0], response[1][1])
            f.ParseFromString(response[1][0])
        except Exception:
            return None
        # BUG FIX: the original removed entries from f.users while iterating
        # it, which skips the element after each removal and lets
        # unvalidated users through. Collect invalid entries first, then
        # remove them.
        invalid = []
        for user in f.users:
            try:
                # Each user's metadata must be signed by that user's own key.
                v_key = nacl.signing.VerifyKey(user.pubkey)
                signature = user.signature
                v_key.verify(user.metadata.SerializeToString(), signature)
                # The guid must match sha512(pubkey) and carry the network's
                # proof-of-work in the trailing hash bytes.
                h = nacl.hash.sha512(user.pubkey)
                pow_hash = h[40:]
                if int(pow_hash[:6], 16) >= 50 or user.guid.encode("hex") != h[:40]:
                    raise Exception('Invalid GUID')
            except Exception:
                invalid.append(user)
        for user in invalid:
            f.users.remove(user)
        return f
    d = self.protocol.callGetFollowing(node_to_ask)
    self.log.info("fetching following list from %s" % node_to_ask)
    return d.addCallback(get_response)
def broadcast(self, message):
    """
    Sends a broadcast message to all online followers. It will resolve
    each guid before sending the broadcast. Messages must be less than
    140 characters. Returns the number of followers the broadcast reached.
    """
    if len(message) > 140:
        return defer.succeed(0)

    def send(nodes):
        # nodes is a DeferredList result: a list of (success, node) pairs,
        # where node is None for guids that failed to resolve.
        def how_many_reached(responses):
            # Count peers that acknowledged the broadcast with "True".
            count = 0
            for resp in responses:
                if resp[1][0] and resp[1][1][0] == "True":
                    count += 1
            return count
        ds = []
        # Sign the broadcast so recipients can authenticate the sender.
        signature = self.signing_key.sign(str(message))[:64]
        for n in nodes:
            if n[1] is not None:
                ds.append(self.protocol.callBroadcast(n[1], message, signature))
        return defer.DeferredList(ds).addCallback(how_many_reached)

    # Resolve every follower guid to a live node before sending.
    dl = []
    f = objects.Followers()
    f.ParseFromString(self.db.follow.get_followers())
    for follower in f.followers:
        dl.append(self.kserver.resolve(follower.guid))
    self.log.info("broadcasting %s to followers" % message)
    return defer.DeferredList(dl).addCallback(send)
def send_message(self, receiving_node, public_key, message_type, message, subject=None, store_only=False):
    """
    Sends a message to another node. If the node isn't online it
    will be placed in the dht for the node to pick up later.

    Args:
        receiving_node: a `dht.node.Node` object for the recipient
        public_key: the recipient's raw curve25519 encryption key
        message_type: an `objects.PlaintextMessage.Type` value
        message: the message body (coerced to str)
        subject: optional subject; for order messages this carries the order id
        store_only: if True, skip the direct send and go straight to the DHT
    """
    pro = Profile(self.db).get()
    p = objects.PlaintextMessage()
    p.sender_guid = self.kserver.node.id
    p.pubkey = self.kserver.node.pubkey
    p.type = message_type
    p.message = str(message)
    if subject is not None:
        p.subject = subject
    if pro.handle:
        p.handle = pro.handle
    if pro.avatar_hash:
        p.avatar_hash = pro.avatar_hash
    p.timestamp = int(time.time())
    # Sign the serialized message before the signature field is set, then
    # attach the 64 byte raw signature.
    signature = self.signing_key.sign(p.SerializeToString())[:64]
    p.signature = signature
    # Encrypt to the recipient with an ephemeral keypair; the ephemeral
    # public key accompanies the ciphertext so the recipient can decrypt.
    skephem = PrivateKey.generate()
    pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
    box = Box(skephem, PublicKey(public_key))
    nonce = nacl.utils.random(Box.NONCE_SIZE)
    ciphertext = box.encrypt(p.SerializeToString(), nonce)

    def get_response(response):
        # Direct delivery failed (or was skipped): store a zlib-compressed
        # encrypted copy in the DHT under the recipient's guid for later pickup.
        if not response[0]:
            ciphertext = box.encrypt(p.SerializeToString().encode("zlib"), nonce)
            self.kserver.set(digest(receiving_node.id), pkephem, ciphertext)
    self.log.info("sending encrypted message to %s" % receiving_node.id.encode("hex"))
    if not store_only:
        self.protocol.callMessage(receiving_node, pkephem, ciphertext).addCallback(get_response)
    else:
        get_response([False])
def get_messages(self, listener):
    """
    Fetch any messages stored for us in the DHT, verify and dispatch them,
    then delete them from the network.

    Args:
        listener: receives any message that isn't an order-related type
            via listener.notify(plaintext, signature).
    """
    # if the transport hasn't been initialized yet, wait a second
    if self.protocol.multiplexer is None or self.protocol.multiplexer.transport is None:
        return task.deferLater(reactor, 1, self.get_messages, listener)

    def parse_messages(messages):
        if messages is not None:
            self.log.info("retrieved %s message(s) from the dht" % len(messages))
            for message in messages:
                try:
                    value = objects.Value()
                    value.ParseFromString(message)
                    try:
                        # valueKey holds the sender's ephemeral public key;
                        # decrypt with our curve25519 key and decompress.
                        box = Box(self.signing_key.to_curve25519_private_key(), PublicKey(value.valueKey))
                        ciphertext = value.serializedData
                        plaintext = box.decrypt(ciphertext).decode("zlib")
                        p = objects.PlaintextMessage()
                        p.ParseFromString(plaintext)
                        # Verify the sender's signature over the message with
                        # the signature field cleared (it was signed that way).
                        signature = p.signature
                        p.ClearField("signature")
                        verify_key = nacl.signing.VerifyKey(p.pubkey)
                        verify_key.verify(p.SerializeToString(), signature)
                        # The sender guid must match sha512(pubkey) and carry
                        # the network's proof-of-work.
                        h = nacl.hash.sha512(p.pubkey)
                        pow_hash = h[40:]
                        if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40]:
                            raise Exception('Invalid guid')
                        # Order-related message types are processed directly;
                        # everything else is handed to the listener.
                        if p.type == objects.PlaintextMessage.Type.Value("ORDER_CONFIRMATION"):
                            c = Contract(self.db, hash_value=unhexlify(p.subject),
                                         testnet=self.protocol.multiplexer.testnet)
                            c.accept_order_confirmation(self.protocol.get_notification_listener(),
                                                        confirmation_json=p.message)
                        elif p.type == objects.PlaintextMessage.Type.Value("RECEIPT"):
                            c = Contract(self.db, hash_value=unhexlify(p.subject),
                                         testnet=self.protocol.multiplexer.testnet)
                            c.accept_receipt(self.protocol.get_notification_listener(),
                                             self.protocol.multiplexer.blockchain,
                                             receipt_json=p.message)
                        elif p.type == objects.PlaintextMessage.Type.Value("DISPUTE_OPEN"):
                            process_dispute(json.loads(p.message, object_pairs_hook=OrderedDict),
                                            self.db, self.protocol.get_message_listener(),
                                            self.protocol.get_notification_listener(),
                                            self.protocol.multiplexer.testnet)
                        elif p.type == objects.PlaintextMessage.Type.Value("DISPUTE_CLOSE"):
                            close_dispute(json.loads(p.message, object_pairs_hook=OrderedDict),
                                          self.db, self.protocol.get_message_listener(),
                                          self.protocol.get_notification_listener(),
                                          self.protocol.multiplexer.testnet)
                        elif p.type == objects.PlaintextMessage.Type.Value("REFUND"):
                            refund_json = json.loads(p.message, object_pairs_hook=OrderedDict)
                            c = Contract(self.db, hash_value=unhexlify(refund_json["refund"]["order_id"]),
                                         testnet=self.protocol.multiplexer.testnet)
                            c.process_refund(refund_json, self.protocol.multiplexer.blockchain,
                                             self.protocol.get_notification_listener())
                        else:
                            listener.notify(p, signature)
                    except Exception:
                        # Undecryptable or invalid message: fall through so it
                        # is still deleted from the DHT below.
                        pass
                    # Delete the message from the network whether or not it
                    # was processed successfully.
                    signature = self.signing_key.sign(value.valueKey)[:64]
                    self.kserver.delete(self.kserver.node.id, value.valueKey, signature)
                except Exception:
                    pass
    self.kserver.get(self.kserver.node.id, False).addCallback(parse_messages)
def purchase(self, node_to_ask, contract):
    """
    Send an order message to the vendor.

    Args:
        node_to_ask: a `dht.node.Node` object
        contract: a complete `Contract` object containing the buyer's order
    """
    def parse_response(response):
        try:
            # The vendor responds with a signature over the concatenation of
            # payment address, amount, listing hash and buyer key; verify it
            # against the vendor's guid key before accepting.
            address = contract.contract["buyer_order"]["order"]["payment"]["address"]
            chaincode = contract.contract["buyer_order"]["order"]["payment"]["chaincode"]
            masterkey_b = contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
            buyer_key = derive_childkey(masterkey_b, chaincode)
            amount = contract.contract["buyer_order"]["order"]["payment"]["amount"]
            listing_hash = contract.contract["vendor_offer"]["listing"]["contract_id"]
            verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
            verify_key.verify(
                str(address) + str(amount) + str(listing_hash) + str(buyer_key), response[1][0])
            return response[1][0]
        except Exception:
            return False

    # Encrypt the full contract to the vendor's guid-derived curve25519 key
    # using an ephemeral keypair.
    public_key = nacl.signing.VerifyKey(contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"],
                                        encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
    skephem = PrivateKey.generate()
    pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
    box = Box(skephem, public_key)
    nonce = nacl.utils.random(Box.NONCE_SIZE)
    ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
    d = self.protocol.callOrder(node_to_ask, pkephem, ciphertext)
    self.log.info("purchasing contract %s from %s" % (contract.get_contract_id(), node_to_ask))
    return d.addCallback(parse_response)
def confirm_order(self, guid, contract):
    """
    Send the order confirmation over to the buyer. If the buyer isn't
    online we will stick it in the DHT temporarily.

    Args:
        guid: hex encoded guid of the buyer
        contract: the `Contract` object carrying vendor_order_confirmation
    """
    def get_node(node_to_ask):
        def parse_response(response):
            if response[0] and response[1][0] == "True":
                # Buyer received and accepted the confirmation directly.
                return True
            elif not response[0]:
                # Buyer unreachable: recompute the order id (the digest of
                # the contract without the confirmation) and store the
                # confirmation in the DHT for later pickup.
                contract_dict = json.loads(json.dumps(contract.contract, indent=4),
                                           object_pairs_hook=OrderedDict)
                del contract_dict["vendor_order_confirmation"]
                order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
                self.send_message(Node(unhexlify(guid)),
                                  nacl.signing.VerifyKey(
                                      contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"],
                                      encoder=nacl.encoding.HexEncoder).to_curve25519_public_key().encode(),
                                  objects.PlaintextMessage.Type.Value("ORDER_CONFIRMATION"),
                                  json.dumps(contract.contract["vendor_order_confirmation"]),
                                  order_id,
                                  store_only=True)
                return True
            else:
                # Buyer answered with an error string; propagate it.
                return response[1][0]
        if node_to_ask:
            # Encrypt the full contract to the buyer's guid-derived
            # curve25519 key with an ephemeral keypair.
            public_key = nacl.signing.VerifyKey(
                contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"],
                encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, public_key)
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
            d = self.protocol.callOrderConfirmation(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])
    self.log.info("sending order confirmation to %s" % guid)
    return self.kserver.resolve(unhexlify(guid)).addCallback(get_node)
def complete_order(self, guid, contract):
    """
    Send the receipt, including the payout signatures and ratings, over to the vendor.
    If the vendor isn't online we will stick it in the DHT temporarily.

    Args:
        guid: hex encoded guid of the vendor
        contract: the `Contract` object carrying buyer_receipt
    """
    def get_node(node_to_ask):
        def parse_response(response):
            if response[0] and response[1][0] == "True":
                # Vendor received the receipt directly.
                return True
            elif not response[0]:
                # Vendor unreachable: recompute the order id (digest of the
                # contract minus the confirmation and receipt) and store the
                # receipt in the DHT for later pickup.
                contract_dict = json.loads(json.dumps(contract.contract, indent=4),
                                           object_pairs_hook=OrderedDict)
                del contract_dict["vendor_order_confirmation"]
                del contract_dict["buyer_receipt"]
                order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
                self.send_message(Node(unhexlify(guid)),
                                  nacl.signing.VerifyKey(
                                      contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"],
                                      encoder=nacl.encoding.HexEncoder).to_curve25519_public_key().encode(),
                                  objects.PlaintextMessage.Type.Value("RECEIPT"),
                                  json.dumps(contract.contract["buyer_receipt"]),
                                  order_id,
                                  store_only=True)
                return True
            else:
                # Vendor answered with an error string; propagate it.
                return response[1][0]
        if node_to_ask:
            # Encrypt the full contract to the vendor's guid-derived
            # curve25519 key with an ephemeral keypair.
            public_key = nacl.signing.VerifyKey(
                contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"],
                encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, public_key)
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
            d = self.protocol.callCompleteOrder(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])
    self.log.info("sending order receipt to %s" % guid)
    return self.kserver.resolve(unhexlify(guid)).addCallback(get_node)
def open_dispute(self, order_id, claim):
    """
    Given an order ID we will pull the contract from disk and send it along with the claim
    to both the moderator and other party to the dispute. If either party isn't online we will stick
    it in the DHT for them.

    Returns False if the order can't be found on disk.
    """
    # First try loading the order as a purchase (we are the buyer)...
    try:
        file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")
        with open(file_path, 'r') as filename:
            contract = json.load(filename, object_pairs_hook=OrderedDict)
        guid = contract["vendor_offer"]["listing"]["id"]["guid"]
        handle = ""
        if "blockchain_id" in contract["vendor_offer"]["listing"]["id"]:
            handle = contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
        guid_key = contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"]
        proof_sig = self.db.purchases.get_proof_sig(order_id)
    except Exception:
        # ...otherwise as a sale (we are the vendor).
        try:
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
            with open(file_path, 'r') as filename:
                contract = json.load(filename, object_pairs_hook=OrderedDict)
            guid = contract["buyer_order"]["order"]["id"]["guid"]
            handle = ""
            if "blockchain_id" in contract["buyer_order"]["order"]["id"]:
                handle = contract["buyer_order"]["order"]["id"]["blockchain_id"]
            guid_key = contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"]
            proof_sig = None
        except Exception:
            return False
    # Append a signed dispute section to the contract the first time a
    # dispute is opened, and persist it back to disk.
    if "dispute" not in contract:
        keychain = KeyChain(self.db)
        contract["dispute"] = {}
        contract["dispute"]["info"] = {}
        contract["dispute"]["info"]["claim"] = claim
        contract["dispute"]["info"]["guid"] = keychain.guid.encode("hex")
        contract["dispute"]["info"]["avatar_hash"] = Profile(self.db).get().avatar_hash.encode("hex")
        if proof_sig:
            contract["dispute"]["info"]["proof_sig"] = base64.b64encode(proof_sig)
        info = json.dumps(contract["dispute"]["info"], indent=4)
        contract["dispute"]["signature"] = base64.b64encode(keychain.signing_key.sign(info)[:64])
        with open(file_path, 'wb') as outfile:
            outfile.write(json.dumps(contract, indent=4))
    # Mark the order as disputed (status 4) in whichever table holds it.
    if self.db.purchases.get_purchase(order_id) is not None:
        self.db.purchases.update_status(order_id, 4)
    elif self.db.sales.get_sale(order_id) is not None:
        self.db.sales.update_status(order_id, 4)
    avatar_hash = Profile(self.db).get().avatar_hash
    self.db.messages.save_message(guid, handle, "", order_id, "DISPUTE_OPEN",
                                  claim, time.time(), avatar_hash, "", True)
    # Look up the chosen moderator's guid key from the listing.
    mod_guid = contract["buyer_order"]["order"]["moderator"]
    for mod in contract["vendor_offer"]["listing"]["moderators"]:
        if mod["guid"] == mod_guid:
            mod_key = mod["pubkeys"]["guid"]
    # NOTE(review): if no listed moderator matches mod_guid, mod_key is never
    # bound and the resolve call below raises NameError — confirm the chosen
    # moderator is always present in the listing.

    def get_node(node_to_ask, recipient_guid, public_key):
        # Deliver the disputed contract to one party, falling back to a DHT
        # store if they're offline.
        def parse_response(response):
            if not response[0]:
                self.send_message(Node(unhexlify(recipient_guid)),
                                  nacl.signing.VerifyKey(
                                      public_key,
                                      encoder=nacl.encoding.HexEncoder).to_curve25519_public_key().encode(),
                                  objects.PlaintextMessage.Type.Value("DISPUTE_OPEN"),
                                  json.dumps(contract),
                                  order_id,
                                  store_only=True)
        if node_to_ask:
            # Encrypt the contract to the recipient's guid-derived
            # curve25519 key with an ephemeral keypair.
            enc_key = nacl.signing.VerifyKey(
                public_key, encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, enc_key)
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract, indent=4), nonce)
            d = self.protocol.callDisputeOpen(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])
    # Send to the counterparty and to the moderator.
    self.kserver.resolve(unhexlify(guid)).addCallback(get_node, guid, guid_key)
    self.kserver.resolve(unhexlify(mod_guid)).addCallback(get_node, mod_guid, mod_key)
def close_dispute(self, order_id, resolution, buyer_percentage,
                  vendor_percentage, moderator_percentage, moderator_address):
    """
    Called when a moderator closes a dispute. It will create a payout transaction refunding both
    parties and send it to them in a dispute_close message.

    Args:
        order_id: hex encoded id of the disputed order
        resolution: the moderator's written decision
        buyer_percentage / vendor_percentage / moderator_percentage: shares of
            the escrowed funds (buyer + vendor must not exceed 1)
        moderator_address: bitcoin address to receive the moderator's fee

    Returns a deferred that fires with True on success, False if no escrowed
    funds were found, or "timed out" if the blockchain server didn't answer.
    """
    # Sanity-check the payout split and the moderator's address before
    # touching the blockchain.
    if float(vendor_percentage) < 0 or float(moderator_percentage) < 0 or float(buyer_percentage) < 0:
        raise Exception("Payouts percentages must be positive")
    if float(vendor_percentage) + float(buyer_percentage) > 1:
        raise Exception("Payout exceeds 100% of value")
    if not self.protocol.multiplexer.blockchain.connected:
        raise Exception("Libbitcoin server not online")
    if not self.protocol.multiplexer.testnet and \
            not (moderator_address[:1] == "1" or moderator_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.protocol.multiplexer.testnet and not \
            (moderator_address[:1] == "n" or moderator_address[:1] == "m" or moderator_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        bitcointools.b58check_to_hex(moderator_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    with open(os.path.join(DATA_FOLDER, "cases", order_id + ".json"), "r") as filename:
        contract = json.load(filename, object_pairs_hook=OrderedDict)
    # Pull refund addresses, guids and encryption keys for both parties
    # from the contract.
    buyer_address = contract["buyer_order"]["order"]["refund_address"]
    buyer_guid = contract["buyer_order"]["order"]["id"]["guid"]
    buyer_enc_key = nacl.signing.VerifyKey(
        contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"],
        encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
    vendor_guid = contract["vendor_offer"]["listing"]["id"]["guid"]
    vendor_enc_key = nacl.signing.VerifyKey(
        contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"],
        encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
    payment_address = contract["buyer_order"]["order"]["payment"]["address"]

    def history_fetched(ec, history):
        # Callback for fetch_history2: tally the escrow UTXOs, build and
        # moderator-sign the payout transaction, record the resolution in
        # the contract on disk, then deliver it to both parties.
        outpoints = []
        satoshis = 0
        outputs = []
        dispute_json = {"dispute_resolution": {"resolution": {}}}
        timeout.cancel()
        if ec:
            print ec
        else:
            # Sum all outputs paid into the escrow address.
            for tx_type, txid, i, height, value in history:  # pylint: disable=W0612
                if tx_type == obelisk.PointIdent.Output:
                    satoshis += value
                    o = {
                        "txid": txid.encode("hex"),
                        "vout": i,
                        "value": value,
                        "scriptPubKey": "00"
                    }
                    if o not in outpoints:
                        outpoints.append(o)
            if satoshis <= 0:
                # Nothing escrowed; nothing to pay out.
                d.callback(False)
            else:
                # Deduct the transaction fee, then the moderator's cut; the
                # remainder is split between buyer and vendor.
                satoshis -= TRANSACTION_FEE
                moderator_fee = int(float(moderator_percentage) * satoshis)
                satoshis -= moderator_fee
                if moderator_fee > 0:
                    outputs.append({'value': moderator_fee, 'address': moderator_address})
                    dispute_json["dispute_resolution"]["resolution"]["moderator_address"] = moderator_address
                    dispute_json["dispute_resolution"]["resolution"]["moderator_fee"] = \
                        round(moderator_fee / float(100000000), 8)
                dispute_json["dispute_resolution"]["resolution"]["transaction_fee"] = \
                    round(TRANSACTION_FEE / float(100000000), 8)
                if float(buyer_percentage) > 0:
                    amt = int(float(buyer_percentage) * satoshis)
                    dispute_json["dispute_resolution"]["resolution"]["buyer_payout"] = \
                        round(amt / float(100000000), 8)
                    outputs.append({'value': amt,
                                    'address': buyer_address})
                if float(vendor_percentage) > 0:
                    amt = int(float(vendor_percentage) * satoshis)
                    dispute_json["dispute_resolution"]["resolution"]["vendor_payout"] = \
                        round(amt / float(100000000), 8)
                    # vendor_address is bound in the outer scope only when a
                    # vendor_order_confirmation exists; otherwise we raised
                    # before fetching the history.
                    outputs.append({'value': amt,
                                    'address': vendor_address})
                # Build the unsigned multisig spend and attach the
                # moderator's signatures so either party can complete it.
                tx = BitcoinTransaction.make_unsigned(outpoints, outputs,
                                                      testnet=self.protocol.multiplexer.testnet)
                chaincode = contract["buyer_order"]["order"]["payment"]["chaincode"]
                redeem_script = str(contract["buyer_order"]["order"]["payment"]["redeem_script"])
                masterkey_m = bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_privkey)
                moderator_priv = derive_childkey(masterkey_m, chaincode, bitcointools.MAINNET_PRIVATE)
                signatures = tx.create_signature(moderator_priv, redeem_script)
                dispute_json["dispute_resolution"]["resolution"]["order_id"] = order_id
                dispute_json["dispute_resolution"]["resolution"]["tx_signatures"] = signatures
                dispute_json["dispute_resolution"]["resolution"]["claim"] = self.db.cases.get_claim(order_id)
                dispute_json["dispute_resolution"]["resolution"]["decision"] = resolution
                # Sign the resolution so the parties can authenticate it.
                dispute_json["dispute_resolution"]["signature"] = \
                    base64.b64encode(KeyChain(self.db).signing_key.sign(json.dumps(
                        dispute_json["dispute_resolution"]["resolution"], indent=4))[:64])
                contract["dispute_resolution"] = dispute_json["dispute_resolution"]
                with open(DATA_FOLDER + "cases/" + order_id + ".json", 'wb') as outfile:
                    outfile.write(json.dumps(contract, indent=4))
                send(dispute_json)

    def send(dispute_json):
        # Deliver the resolution to both parties, falling back to a DHT
        # store for whichever is offline, then fire the deferred.
        def get_node(node_to_ask, recipient_guid, public_key):
            def parse_response(response):
                if not response[0]:
                    self.send_message(Node(unhexlify(recipient_guid)),
                                      public_key.encode(),
                                      objects.PlaintextMessage.Type.Value("DISPUTE_CLOSE"),
                                      dispute_json,
                                      order_id,
                                      store_only=True)
            if node_to_ask:
                # Encrypt the resolution to the recipient with an ephemeral keypair.
                skephem = PrivateKey.generate()
                pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
                box = Box(skephem, public_key)
                nonce = nacl.utils.random(Box.NONCE_SIZE)
                ciphertext = box.encrypt(json.dumps(dispute_json, indent=4), nonce)
                self.protocol.callDisputeClose(node_to_ask, pkephem, ciphertext).addCallback(parse_response)
            else:
                parse_response([False])
        self.kserver.resolve(unhexlify(vendor_guid)).addCallback(get_node, vendor_guid, vendor_enc_key)
        self.kserver.resolve(unhexlify(buyer_guid)).addCallback(get_node, buyer_guid, buyer_enc_key)
        self.db.cases.update_status(order_id, 1)
        d.callback(True)

    d = defer.Deferred()
    if "dispute_resolution" not in contract:
        # First time closing this dispute: we need the vendor's payout
        # address (only known after order confirmation) and the on-chain
        # escrow history before we can build the payout.
        if "vendor_order_confirmation" in contract and float(vendor_percentage) > 0:
            vendor_address = contract["vendor_order_confirmation"]["invoice"]["payout"]["address"]
        elif "vendor_order_confirmation" not in contract and float(vendor_percentage) > 0:
            raise Exception("Cannot refund seller before order confirmation is sent")

        def libbitcoin_timeout():
            # Fired if the blockchain server doesn't answer within 5 seconds.
            d.callback("timed out")
        timeout = reactor.callLater(5, libbitcoin_timeout)
        self.protocol.multiplexer.blockchain.fetch_history2(payment_address, history_fetched)
    else:
        # Dispute already resolved once: just resend the stored resolution.
        dispute_json = {"dispute_resolution": contract["dispute_resolution"]}
        send(dispute_json)
    return d
def release_funds(self, order_id):
    """
    Build, sign, and broadcast the payout transaction for a disputed contract.

    This should be called after the moderator has resolved the dispute and
    provided his signature (his resolution, including his tx signatures, must
    already be merged into the contract as "dispute_resolution"). The multisig
    outpoints are paid out to the moderator, buyer, and/or vendor per the
    resolution, combining our own signature with the moderator's.

    Args:
        order_id: hex-encoded id of the disputed order.

    Raises:
        Exception: if no in-progress purchase or sale exists for `order_id`.
    """
    if os.path.exists(os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")):
        # we are the buyer in this dispute
        file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")
        outpoints = json.loads(self.db.purchases.get_outpoint(order_id))
    elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")):
        # we are the vendor in this dispute
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
        outpoints = json.loads(self.db.sales.get_outpoint(order_id))
    else:
        # previously fell through with file_path/outpoints unbound and died
        # with a NameError; fail with a meaningful message instead.
        raise Exception("No in-progress order found for id %s" % order_id)
    with open(file_path, 'r') as filename:
        contract = json.load(filename, object_pairs_hook=OrderedDict)

    # Build the payout outputs from the moderator's resolution. Amounts in the
    # resolution are BTC floats; convert to satoshis.
    outputs = []
    moderator_fee = int(round(float(contract["dispute_resolution"]
                                    ["resolution"]["moderator_fee"]) * 100000000))
    if moderator_fee > 0:
        outputs.append({'value': moderator_fee,
                        'address': contract["dispute_resolution"]["resolution"]["moderator_address"]})
    if "buyer_payout" in contract["dispute_resolution"]["resolution"]:
        # the buyer is paid to the refund address from his original order
        buyer_address = contract["buyer_order"]["order"]["refund_address"]
        outputs.append({'value': int(round(float(contract["dispute_resolution"]
                                                 ["resolution"]["buyer_payout"]) * 100000000)),
                        'address': buyer_address})
    if "vendor_payout" in contract["dispute_resolution"]["resolution"]:
        # the vendor is paid to the payout address from his order confirmation
        vendor_address = contract["vendor_order_confirmation"]["invoice"]["payout"]["address"]
        outputs.append({'value': int(round(float(contract["dispute_resolution"]
                                                 ["resolution"]["vendor_payout"]) * 100000000)),
                        'address': vendor_address})

    tx = BitcoinTransaction.make_unsigned(outpoints, outputs, testnet=self.protocol.multiplexer.testnet)

    # derive our multisig child key from the chaincode in the buyer's order
    chaincode = contract["buyer_order"]["order"]["payment"]["chaincode"]
    redeem_script = str(contract["buyer_order"]["order"]["payment"]["redeem_script"])
    masterkey = bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_privkey)
    childkey = derive_childkey(masterkey, chaincode, bitcointools.MAINNET_PRIVATE)
    own_sig = tx.create_signature(childkey, redeem_script)

    # merge our signatures with the moderator's, grouped per input index
    signatures = []
    for index in range(len(outpoints)):
        sig_ob = {"index": index, "signatures": []}
        for s in own_sig:
            if int(s["index"]) == index:
                sig_ob["signatures"].append(s["signature"])
        for s in contract["dispute_resolution"]["resolution"]["tx_signatures"]:
            if int(s["index"]) == index:
                sig_ob["signatures"].append(s["signature"])
        signatures.append(sig_ob)

    tx.multisign(signatures, redeem_script)
    tx.broadcast(self.protocol.multiplexer.blockchain)
    self.db.transactions.add_transaction(tx.to_raw_tx())
    self.log.info("broadcasting payout tx %s to network" % tx.get_hash())

    # mark the order as closed (status 6) on whichever side we hold it
    if self.db.purchases.get_purchase(order_id) is not None:
        self.db.purchases.update_status(order_id, 6)
    elif self.db.sales.get_sale(order_id) is not None:
        self.db.sales.update_status(order_id, 6)
def get_ratings(self, node_to_ask, listing_hash=None):
    """
    Query the given node for a listing of ratings/reviews for the given listing.

    Args:
        node_to_ask: the node to query (typically the vendor of the listing).
        listing_hash: binary hash of the listing to fetch ratings for, or
            None to request ratings for all of the node's listings.

    Returns:
        A deferred firing with a list of validated rating dicts, or None when
        the node is unreachable or its response fails signature validation.
    """
    def get_result(result):
        # result appears to be (success_flag, (payload, signature)) from the
        # RPC layer -- TODO confirm against callGetRatings.
        try:
            # the whole payload must be signed by the queried node's guid key
            verify_key = nacl.signing.VerifyKey(node_to_ask.pubkey)
            verify_key.verify(result[1][0], result[1][1])
            ratings = json.loads(result[1][0].decode("zlib"), object_pairs_hook=OrderedDict)
            ret = []
            for rating in ratings:
                address = rating["tx_summary"]["address"]
                buyer_key = rating["tx_summary"]["buyer_key"]
                amount = rating["tx_summary"]["amount"]
                listing_hash = rating["tx_summary"]["listing"]
                proof_sig = rating["tx_summary"]["proof_of_tx"]
                try:
                    # vendor must have signed a proof that the rated tx happened
                    verify_key.verify(str(address) + str(amount) + str(listing_hash) + str(buyer_key),
                                      base64.b64decode(proof_sig))
                    # the buyer's bitcoin key must have signed the tx summary
                    valid = bitcointools.ecdsa_raw_verify(json.dumps(rating["tx_summary"], indent=4),
                                                          bitcointools.decode_sig(rating["signature"]),
                                                          buyer_key)
                    if not valid:
                        raise Exception("Bitcoin signature not valid")
                    if "buyer_guid" in rating["tx_summary"] or "buyer_guid_key" in rating["tx_summary"]:
                        # the rating optionally carries the buyer's guid: check the
                        # guid signature, the key's proof-of-work, and that the guid
                        # is the hash of the key. A rating carrying only one of the
                        # two fields raises KeyError here and is dropped.
                        buyer_key_bin = unhexlify(rating["tx_summary"]["buyer_guid_key"])
                        buyer_key = nacl.signing.VerifyKey(buyer_key_bin)
                        buyer_key.verify(json.dumps(rating["tx_summary"], indent=4),
                                         base64.b64decode(rating["guid_signature"]))
                        h = nacl.hash.sha512(buyer_key_bin)
                        pow_hash = h[40:]
                        if int(pow_hash[:6], 16) >= 50 or rating["tx_summary"]["buyer_guid"] != h[:40]:
                            raise Exception('Invalid GUID')
                    ret.append(rating)
                except Exception:
                    # any validation failure skips this rating without
                    # failing the whole batch
                    pass
            return ret
        except Exception:
            # unparsable or improperly signed response
            return None
    if node_to_ask.ip is None:
        return defer.succeed(None)
    a = "ALL" if listing_hash is None else listing_hash.encode("hex")
    self.log.info("fetching ratings for contract %s from %s" % (a, node_to_ask))
    d = self.protocol.callGetRatings(node_to_ask, listing_hash)
    return d.addCallback(get_result)
def refund(self, order_id):
    """
    Refund the given order_id. If this is a direct payment the transaction will
    be immediately broadcast to the Bitcoin network; otherwise the refund
    message sent to the buyer will contain the signature(s) for the multisig.
    """
    # prefer the in-progress contract; fall back to the trade receipt
    file_path = os.path.join(DATA_FOLDER + "store", "contracts", "in progress", order_id + ".json")
    if not os.path.exists(file_path):
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", order_id + ".json")
    outpoints = json.loads(self.db.sales.get_outpoint(order_id))
    with open(file_path, 'r') as filename:
        contract = json.load(filename, object_pairs_hook=OrderedDict)
    buyer_guid = contract["buyer_order"]["order"]["id"]["guid"]
    # curve25519 key for encrypting the refund message to the buyer
    buyer_enc_key = nacl.signing.VerifyKey(
        contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"],
        encoder=nacl.encoding.HexEncoder).to_curve25519_public_key()
    if "refund" in contract:
        # a refund was already generated for this order; just resend it
        refund_json = {"refund": contract["refund"]}
    else:
        refund_address = contract["buyer_order"]["order"]["refund_address"]
        chaincode = contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_v = bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_privkey)
        vendor_priv = derive_childkey(masterkey_v, chaincode, bitcointools.MAINNET_PRIVATE)
        refund_json = {"refund": {}}
        refund_json["refund"]["order_id"] = order_id
        if "moderator" in contract["buyer_order"]["order"]:
            # moderated (multisig) payment: we can only contribute our
            # signature; the buyer completes and broadcasts the tx himself
            in_value = 0
            for outpoint in outpoints:
                in_value += outpoint["value"]
            out_value = in_value - long(contract["buyer_order"]["order"]["payment"]["refund_tx_fee"])
            tx = BitcoinTransaction.make_unsigned(outpoints, refund_address,
                                                  testnet=self.protocol.multiplexer.testnet,
                                                  out_value=out_value)
            redeem_script = contract["buyer_order"]["order"]["payment"]["redeem_script"]
            sigs = tx.create_signature(vendor_priv, redeem_script)
            refund_json["refund"]["value"] = round(tx.get_out_value() / float(100000000), 8)
            refund_json["refund"]["signature(s)"] = sigs
        else:
            # direct payment: we hold the key, so sign and broadcast now
            tx = BitcoinTransaction.make_unsigned(outpoints, refund_address,
                                                  testnet=self.protocol.multiplexer.testnet)
            tx.sign(vendor_priv)
            tx.broadcast(self.protocol.multiplexer.blockchain)
            self.db.transactions.add_transaction(tx.to_raw_tx())
            self.log.info("broadcasting refund tx %s to network" % tx.get_hash())
            refund_json["refund"]["txid"] = tx.get_hash()
        # persist the refund into the contract, mark refunded (status 7), and
        # move the contract file from "in progress" to "trade receipts"
        contract["refund"] = refund_json["refund"]
        self.db.sales.update_status(order_id, 7)
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", order_id + ".json")
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(contract, indent=4))
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
        if os.path.exists(file_path):
            os.remove(file_path)

    def get_node(node_to_ask):
        def parse_response(response):
            # response appears to be (success_flag, payload) -- the daemon
            # replies "True" on acceptance. TODO confirm against callRefund.
            if response[0] and response[1][0] == "True":
                return True
            if not response[0]:
                # buyer offline: store the encrypted refund in the DHT for
                # later pickup
                self.send_message(Node(unhexlify(buyer_guid)),
                                  buyer_enc_key.encode(),
                                  objects.PlaintextMessage.Type.Value("REFUND"),
                                  refund_json,
                                  order_id,
                                  store_only=True)
                return True
            else:
                return response[1][0]
        if node_to_ask:
            # encrypt the refund with an ephemeral key for the buyer
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, buyer_enc_key)
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(refund_json, indent=4), nonce)
            d = self.protocol.callRefund(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])
    self.log.info("sending refund message to %s" % buyer_guid)
    return self.kserver.resolve(unhexlify(buyer_guid)).addCallback(get_node)
def update_listings(self):
    """
    Best-effort periodic maintenance of our published listings:

    - Re-publishes each listing's keywords to the DHT when its last publish
      was more than 500000 seconds ago (or it was never published).
    - Deletes listings whose expiration has passed.
    - Re-publishes our moderator status on the same schedule, if we are one.

    Last-publish timestamps are persisted in store/listings.pickle.
    """
    try:
        if self.protocol.multiplexer is None:
            # not connected yet; retry shortly
            return reactor.callLater(1, self.update_listings)
        fname = os.path.join(DATA_FOLDER, "store", "listings.pickle")
        if os.path.exists(fname):
            with open(fname, 'r') as f:
                data = pickle.load(f)
        else:
            data = {}
        l = objects.Listings()
        l.ParseFromString(self.db.listings.get_proto())
        for listing in l.listing:
            contract_hash = listing.contract_hash
            c = Contract(self.db, hash_value=contract_hash, testnet=self.protocol.multiplexer.testnet)
            if contract_hash not in data or time.time() - data[contract_hash] > 500000:
                # stale (or never published): push each keyword into the DHT
                for keyword in c.contract["vendor_offer"]["listing"]["item"]["keywords"]:
                    self.kserver.set(digest(keyword.lower()), unhexlify(c.get_contract_id()),
                                     self.kserver.node.getProto().SerializeToString())
                data[contract_hash] = time.time()
            if c.check_expired():
                c.delete(True)
                if contract_hash in data:
                    del data[contract_hash]
        guid = KeyChain(self.db).guid
        moderator = Profile(self.db).get().moderator
        if (guid not in data or time.time() - data[guid] > 500000) and moderator:
            self.make_moderator()
            data[guid] = time.time()
        with open(fname, 'w') as f:
            pickle.dump(data, f)
    except Exception as e:
        # best-effort loop: never crash the caller, but log the failure
        # instead of silently swallowing it as before
        self.log.error("failed to update listings: %s" % e)
def update_moderators_on_listings(self, moderator_list):
    """
    Synchronize the moderator list embedded in each of our listings with the
    given list of moderator guids, then re-sign and re-save each contract.

    Args:
        moderator_list: list of hex-encoded moderator guids that every
            listing should end up with.
    """
    try:
        l = objects.Listings()
        l.ParseFromString(self.db.listings.get_proto())
    except Exception:
        # no listings proto available; nothing to update
        return
    keychain = KeyChain(self.db)
    for listing in l.listing:
        try:
            contract_hash = listing.contract_hash
            c = Contract(self.db, hash_value=contract_hash, testnet=self.protocol.multiplexer.testnet)
            # collect the guids currently on the contract
            contract_moderators = []
            if "moderators" in c.contract["vendor_offer"]["listing"]:
                for m in c.contract["vendor_offer"]["listing"]["moderators"]:
                    contract_moderators.append(m["guid"])
            # diff against the desired list
            mods_to_remove = list(set(contract_moderators) - set(moderator_list))
            mods_to_add = list(set(moderator_list) - set(contract_moderators))
            for mod in mods_to_add:
                # mod_info is a db row; indices map to the moderators table
                # columns -- TODO confirm ordering against db schema
                mod_info = self.db.moderators.get_moderator(mod)
                if mod_info is not None:
                    moderator_json = {
                        "guid": mod,
                        "name": mod_info[5],
                        "avatar": mod_info[7].encode("hex"),
                        "short_description": mod_info[6],
                        "fee": str(mod_info[8]) + "%",
                        "blockchain_id": mod_info[4],
                        "pubkeys": {
                            "guid": mod_info[1].encode("hex"),
                            "bitcoin": {
                                "key": mod_info[2].encode("hex"),
                                "signature": base64.b64encode(mod_info[3])
                            }
                        }
                    }
                    if "moderators" not in c.contract["vendor_offer"]["listing"]:
                        c.contract["vendor_offer"]["listing"]["moderators"] = []
                    c.contract["vendor_offer"]["listing"]["moderators"].append(moderator_json)
            for mod in mods_to_remove:
                for rem in c.contract["vendor_offer"]["listing"]["moderators"]:
                    if rem["guid"] == mod:
                        c.contract["vendor_offer"]["listing"]["moderators"].remove(rem)
            # drop an empty moderators list entirely
            if "moderators" in c.contract["vendor_offer"]["listing"] and \
                    len(c.contract["vendor_offer"]["listing"]["moderators"]) == 0:
                del c.contract["vendor_offer"]["listing"]["moderators"]
            # re-sign the mutated listing with both the guid and bitcoin keys
            # (signatures cover the exact 4-space-indented serialization)
            listing = json.dumps(c.contract["vendor_offer"]["listing"], indent=4)
            c.contract["vendor_offer"]["signatures"] = {}
            c.contract["vendor_offer"]["signatures"]["guid"] = \
                base64.b64encode(keychain.signing_key.sign(listing)[:64])
            c.contract["vendor_offer"]["signatures"]["bitcoin"] = \
                bitcointools.encode_sig(*bitcointools.ecdsa_raw_sign(
                    listing, bitcointools.bip32_extract_key(keychain.bitcoin_master_privkey)))
            c.previous_title = None
            c.save()
        except Exception:
            # a broken contract shouldn't stop the remaining listings from
            # being updated
            pass
@staticmethod
def cache(file_to_save, filename):
    """
    Write the given data into the cache folder, overwriting any previous
    version stored under the same name.
    """
    destination = os.path.join(DATA_FOLDER, "cache", filename)
    with open(destination, 'wb') as cached:
        cached.write(file_to_save)
@staticmethod
def load_from_cache(filename):
    """
    Load a file from the cache folder.

    Args:
        filename: name of the cached file (as passed to `cache`).

    Returns:
        The raw file contents, or None if the file is not cached.
    """
    # build the path with os.path.join, consistent with `cache` above,
    # instead of naive string concatenation; also avoid shadowing the
    # `filename` parameter with the file handle
    file_path = os.path.join(DATA_FOLDER, "cache", filename)
    if not os.path.exists(file_path):
        return None
    with open(file_path, "r") as infile:
        return infile.read()
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "market/network.py",
"copies": "1",
"size": "64061",
"license": "mit",
"hash": 4884986703020986000,
"line_mean": 49.7212984956,
"line_max": 114,
"alpha_frac": 0.5388145674,
"autogenerated": false,
"ratio": 4.3448860553445465,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5383700622744547,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import base64
import bitcointools
import json
import nacl.encoding
import nacl.signing
import os
import random
import re
import time
from binascii import unhexlify
from bitcoin import SelectParams
from bitcoin.core.script import CScript, OP_2, OP_3, OP_CHECKMULTISIG
from bitcoin.wallet import P2SHBitcoinAddress, P2PKHBitcoinAddress, CBitcoinAddress
from collections import OrderedDict
from config import DATA_FOLDER, TRANSACTION_FEE
from copy import deepcopy
from datetime import datetime
from dht.utils import digest
from hashlib import sha256
from keys.bip32utils import derive_childkey
from keys.keychain import KeyChain
from log import Logger
from market.profile import Profile
from market.btcprice import BtcPrice
from market.transactions import BitcoinTransaction
from protos.countries import CountryCode
from protos.objects import Listings
from market.smtpnotification import SMTPNotification
class Contract(object):
"""
A class for creating and interacting with OpenBazaar Ricardian contracts.
"""
def __init__(self, database, contract=None, hash_value=None, testnet=False):
    """
    This class can be instantiated with either an `OrderedDict` or a hash
    of a contract. If a hash is used, we will load the contract from either
    the file system or cache.

    Alternatively, pass in no parameters if the intent is to create a new
    contract.

    Args:
        database: the node's database interface.
        contract: an `OrderedDict` containing a filled out json contract.
        hash_value: a hash160 (binary) of a contract.
        testnet: is this contract on the testnet.
    """
    self.db = database
    self.keychain = KeyChain(self.db)
    if contract is not None:
        self.contract = contract
    elif hash_value is not None:
        try:
            # first try the filemap; fall back to the cache folder
            file_path = self.db.filemap.get_file(hash_value.encode("hex"))
            if file_path is None:
                file_path = os.path.join(DATA_FOLDER, "cache", hash_value.encode("hex"))
            with open(file_path, 'r') as filename:
                self.contract = json.load(filename, object_pairs_hook=OrderedDict)
        except Exception:
            # not in filemap/cache: search each order folder for the
            # contract json, in purchase then sale order
            file_name = hash_value.encode("hex") + ".json"
            if os.path.exists(os.path.join(DATA_FOLDER, "purchases", "unfunded", file_name)):
                file_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", file_name)
            elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "in progress", file_name)):
                file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", file_name)
            elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "trade receipts", file_name)):
                file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", file_name)
            elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", file_name)):
                file_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", file_name)
            elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "in progress", file_name)):
                file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", file_name)
            elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", file_name)):
                file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", file_name)
            try:
                with open(file_path, 'r') as filename:
                    self.contract = json.load(filename, object_pairs_hook=OrderedDict)
            except Exception:
                # nothing found anywhere: start with an empty contract
                self.contract = {}
    else:
        self.contract = {}
    self.log = Logger(system=self)
    # used when purchasing this contract
    self.testnet = testnet
    self.notification_listener = None   # set by accept_* methods
    self.blockchain = None              # libbitcoin client, set when needed
    self.amount_funded = 0              # BTC received toward the payment address
    self.received_txs = []              # txids already counted toward funding
    self.is_purchase = False            # True when we are the buyer
    self.outpoints = []                 # unspent outpoints funding this contract
def create(self,
           expiration_date,
           metadata_category,
           title,
           description,
           currency_code,
           price,
           process_time,
           nsfw,
           shipping_origin=None,
           shipping_regions=None,
           est_delivery_domestic=None,
           est_delivery_international=None,
           terms_conditions=None,
           returns=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           images=None,
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None,
           options=None,
           moderators=None,
           contract_id=None):
    """
    Build (or rebuild) the "vendor_offer" section of this contract, sign it,
    and save it.

    All parameters are strings except:
    :param expiration_date: `string` (must be formatted UTC datetime)
    :param keywords: `list`
    :param nsfw: `boolean`
    :param images: a `list` of image files
    :param free_shipping: `boolean`
    :param shipping_origin: a 'string' formatted `CountryCode`
    :param shipping_regions: a 'list' of 'string' formatted `CountryCode`s
    :param options: a 'dict' containing options as keys and 'list' as option values.
    :param moderators: a 'list' of 'string' guids (hex encoded).
    :param contract_id: pass an existing id to edit a listing in place;
        None/"" generates a fresh id.
    """
    profile = Profile(self.db).get()
    if contract_id is not None and contract_id != "":
        # editing an existing listing: remember the old title so save()
        # can replace the previous record
        self.previous_title = self.contract["vendor_offer"]["listing"]["item"]["title"]
    else:
        self.previous_title = None
        contract_id = digest(random.getrandbits(255)).encode("hex")
    # skeleton of the vendor offer; optional sections are attached below
    self.contract = OrderedDict(
        {
            "vendor_offer": {
                "listing": {
                    "contract_id": contract_id,
                    "metadata": {
                        "version": "1",
                        "category": metadata_category.lower(),
                        "category_sub": "fixed price",
                        "last_modified": int(time.time())
                    },
                    "id": {
                        "guid": self.keychain.guid.encode("hex"),
                        "pubkeys": {
                            "guid": self.keychain.verify_key.encode(encoder=nacl.encoding.HexEncoder),
                            "bitcoin": bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
                        }
                    },
                    "item": {
                        "title": title,
                        "description": description,
                        "process_time": process_time,
                        "price_per_unit": {},
                        "nsfw": nsfw
                    }
                }
            }
        }
    )
    if expiration_date == "":
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = "never"
    else:
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = expiration_date + " UTC"
    if metadata_category == "physical good" and condition is not None:
        self.contract["vendor_offer"]["listing"]["item"]["condition"] = condition
    # price is stored either directly in BTC or as fiat + currency code
    if currency_code.upper() == "BTC":
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["bitcoin"] = round(float(price), 8)
    else:
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["fiat"] = {}
        item["price_per_unit"]["fiat"]["price"] = price
        item["price_per_unit"]["fiat"]["currency_code"] = currency_code
    if keywords is not None:
        self.contract["vendor_offer"]["listing"]["item"]["keywords"] = []
        self.contract["vendor_offer"]["listing"]["item"]["keywords"].extend(keywords)
    if category is not None:
        self.contract["vendor_offer"]["listing"]["item"]["category"] = category
    if sku is not None:
        self.contract["vendor_offer"]["listing"]["item"]["sku"] = sku
    if options is not None:
        self.contract["vendor_offer"]["listing"]["item"]["options"] = options
    if metadata_category == "physical good":
        # shipping section: origin, flat fees (BTC or fiat), regions, and
        # delivery estimates. NOTE(review): free_shipping=None falls into
        # the "free" branch along with True -- confirm intended.
        self.contract["vendor_offer"]["listing"]["shipping"] = {}
        shipping = self.contract["vendor_offer"]["listing"]["shipping"]
        shipping["shipping_origin"] = shipping_origin
        if free_shipping is False:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = False
            self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"] = {}
            if shipping_currency_code == "BTC":
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"] = {}
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "domestic"] = shipping_domestic
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "international"] = shipping_international
            else:
                shipping = self.contract["vendor_offer"]["listing"]["shipping"]
                shipping["flat_fee"]["fiat"] = {}
                shipping["flat_fee"]["fiat"]["price"] = {}
                shipping["flat_fee"]["fiat"]["price"][
                    "domestic"] = shipping_domestic
                shipping["flat_fee"]["fiat"]["price"][
                    "international"] = shipping_international
                shipping["flat_fee"]["fiat"][
                    "currency_code"] = shipping_currency_code
        else:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = True
        self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"] = []
        for region in shipping_regions:
            shipping = self.contract["vendor_offer"]["listing"]["shipping"]
            shipping["shipping_regions"].append(region)
        listing = self.contract["vendor_offer"]["listing"]
        listing["shipping"]["est_delivery"] = {}
        listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
        listing["shipping"]["est_delivery"][
            "international"] = est_delivery_international
    if profile.handle != "":
        self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] = profile.handle
    if images is not None:
        self.contract["vendor_offer"]["listing"]["item"]["image_hashes"] = []
        for image_hash in images:
            # image hashes are 40-char hex (hash160)
            if len(image_hash) != 40:
                raise Exception("Invalid image hash")
            self.contract["vendor_offer"]["listing"]["item"]["image_hashes"].append(image_hash)
    if terms_conditions is not None or returns is not None:
        self.contract["vendor_offer"]["listing"]["policy"] = {}
        if terms_conditions is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["terms_conditions"] = terms_conditions
        if returns is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["returns"] = returns
    if moderators is not None:
        self.contract["vendor_offer"]["listing"]["moderators"] = []
        for mod in moderators:
            # mod_info is a db row; indices map to the moderators table
            # columns -- TODO confirm ordering against db schema
            mod_info = self.db.moderators.get_moderator(mod)
            if mod_info is not None:
                moderator = {
                    "guid": mod,
                    "name": mod_info[5],
                    "avatar": mod_info[7].encode("hex"),
                    "short_description": mod_info[6],
                    "fee": str(mod_info[8]) + "%",
                    "blockchain_id": mod_info[4],
                    "pubkeys": {
                        "guid": mod_info[1].encode("hex"),
                        "bitcoin": {
                            "key": mod_info[2].encode("hex"),
                            "signature": base64.b64encode(mod_info[3])
                        }
                    }
                }
                self.contract["vendor_offer"]["listing"]["moderators"].append(moderator)
    # sign the exact 4-space-indented serialization of the listing with both
    # the guid key and the bitcoin master key
    listing = json.dumps(self.contract["vendor_offer"]["listing"], indent=4)
    self.contract["vendor_offer"]["signatures"] = {}
    self.contract["vendor_offer"]["signatures"]["guid"] = \
        base64.b64encode(self.keychain.signing_key.sign(listing)[:64])
    self.contract["vendor_offer"]["signatures"]["bitcoin"] = \
        bitcointools.encode_sig(*bitcointools.ecdsa_raw_sign(
            listing, bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey)))
    self.save()
def add_purchase_info(self,
                      quantity,
                      refund_address,
                      ship_to=None,
                      shipping_address=None,
                      city=None,
                      state=None,
                      postal_code=None,
                      country=None,
                      moderator=None,
                      options=None):
    """
    Update the contract with the buyer's purchase information.

    Builds the "buyer_order" section: shipping details, a payment address
    (2-of-3 P2SH multisig when a moderator is used, direct P2PKH to the
    vendor otherwise), and the total amount to pay; then signs the order.

    Returns:
        (payment_address, amount) on success, or False if the requested
        moderator is not listed on the contract.

    Raises:
        Exception: invalid refund address, or total below the tx fee.
    """
    # validate the refund address prefix for the active network
    if not self.testnet and not (refund_address[:1] == "1" or refund_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.testnet and not \
            (refund_address[:1] == "n" or refund_address[:1] == "m" or refund_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        bitcointools.b58check_to_hex(refund_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    profile = Profile(self.db).get()
    order_json = {
        "buyer_order": {
            "order": {
                # hash of the vendor offer this order refers to
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "date": str(datetime.utcnow()) + " UTC",
                "quantity": quantity,
                "id": {
                    "guid": self.keychain.guid.encode("hex"),
                    "pubkeys": {
                        "guid": self.keychain.verify_key.encode(encoder=nacl.encoding.HexEncoder),
                        "bitcoin": bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey),
                    }
                },
                "payment": {},
                "refund_address": refund_address
            }
        }
    }
    # configure python-bitcoinlib for the right chain
    SelectParams("testnet" if self.testnet else "mainnet")
    if profile.handle != "":
        order_json["buyer_order"]["order"]["id"]["blockchain_id"] = profile.handle
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        order_json["buyer_order"]["order"]["shipping"] = {}
        order_json["buyer_order"]["order"]["shipping"]["ship_to"] = ship_to
        order_json["buyer_order"]["order"]["shipping"]["address"] = shipping_address
        order_json["buyer_order"]["order"]["shipping"]["city"] = city
        order_json["buyer_order"]["order"]["shipping"]["state"] = state
        order_json["buyer_order"]["order"]["shipping"]["postal_code"] = postal_code
        order_json["buyer_order"]["order"]["shipping"]["country"] = country
    if options is not None:
        order_json["buyer_order"]["order"]["options"] = options
    if moderator:
        # moderated payment: derive buyer/vendor/moderator child keys from a
        # fresh chaincode and build a 2-of-3 multisig P2SH payment address
        chaincode = sha256(str(random.getrandbits(256))).digest().encode("hex")
        order_json["buyer_order"]["order"]["payment"]["chaincode"] = chaincode
        valid_mod = False
        for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
            if mod["guid"] == moderator:
                order_json["buyer_order"]["order"]["moderator"] = moderator
                masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
                valid_mod = True
        if not valid_mod:
            # requested moderator isn't on this contract
            return False
        masterkey_b = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = unhexlify(derive_childkey(masterkey_b, chaincode))
        vendor_key = unhexlify(derive_childkey(masterkey_v, chaincode))
        moderator_key = unhexlify(derive_childkey(masterkey_m, chaincode))
        redeem_script = CScript([OP_2, buyer_key, vendor_key, moderator_key, OP_3, OP_CHECKMULTISIG])
        order_json["buyer_order"]["order"]["payment"]["redeem_script"] = redeem_script.encode("hex")
        payment_address = str(P2SHBitcoinAddress.from_redeemScript(redeem_script))
        order_json["buyer_order"]["order"]["payment"]["address"] = payment_address
        order_json["buyer_order"]["order"]["payment"]["refund_tx_fee"] = TRANSACTION_FEE
    else:
        # direct payment: pay straight to a vendor child key (P2PKH)
        chaincode = sha256(str(random.getrandbits(256))).digest().encode("hex")
        order_json["buyer_order"]["order"]["payment"]["chaincode"] = chaincode
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        vendor_key = unhexlify(derive_childkey(masterkey_v, chaincode))
        payment_address = str(P2PKHBitcoinAddress.from_pubkey(vendor_key))
        order_json["buyer_order"]["order"]["payment"]["address"] = payment_address
    # compute the total in BTC (converting from fiat if needed)
    price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
    if "bitcoin" in price_json:
        amount_to_pay = float(price_json["bitcoin"]) * quantity
    else:
        currency_code = price_json["fiat"]["currency_code"]
        fiat_price = price_json["fiat"]["price"]
        conversion_rate = BtcPrice.instance().get(currency_code.upper())
        amount_to_pay = float("{0:.8f}".format(float(fiat_price) / float(conversion_rate))) * quantity
    if "shipping" in self.contract["vendor_offer"]["listing"]:
        if not self.contract["vendor_offer"]["listing"]["shipping"]["free"]:
            # add the domestic or international flat fee, converting from
            # fiat when the fee isn't denominated in BTC
            shipping_origin = str(self.contract["vendor_offer"]["listing"]["shipping"][
                "shipping_origin"].upper())
            if shipping_origin == country.upper():
                if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                    shipping_amount = float(self.contract["vendor_offer"]["listing"][
                        "shipping"]["flat_fee"]["bitcoin"]["domestic"]) * quantity
                else:
                    price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                        "price"]["domestic"]
                    currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "fiat"]["currency_code"]
                    conversion_rate = BtcPrice.instance().get(currency.upper(), False)
                    shipping_amount = float("{0:.8f}".format(float(price) / float(conversion_rate))) * quantity
            else:
                if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                    shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"][
                        "flat_fee"]["bitcoin"]["international"]) * quantity
                else:
                    price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                        "price"]["international"]
                    currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "fiat"]["currency_code"]
                    conversion_rate = BtcPrice.instance().get(currency.upper(), False)
                    shipping_amount = float("{0:.8f}".format(float(price) / float(conversion_rate))) * quantity
            amount_to_pay += shipping_amount
    if round(amount_to_pay, 8) < round(TRANSACTION_FEE / float(100000000), 8):
        raise Exception("Contract price is below transaction fee.")
    order_json["buyer_order"]["order"]["payment"]["amount"] = round(amount_to_pay, 8)
    self.contract["buyer_order"] = order_json["buyer_order"]
    # sign the exact 4-space-indented serialization of the order with both
    # the guid key and the bitcoin master key
    order = json.dumps(self.contract["buyer_order"]["order"], indent=4)
    self.contract["buyer_order"]["signatures"] = {}
    self.contract["buyer_order"]["signatures"]["guid"] = \
        base64.b64encode(self.keychain.signing_key.sign(order)[:64])
    self.contract["buyer_order"]["signatures"]["bitcoin"] = \
        bitcointools.encode_sig(*bitcointools.ecdsa_raw_sign(
            order, bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey)))
    return (self.contract["buyer_order"]["order"]["payment"]["address"],
            order_json["buyer_order"]["order"]["payment"]["amount"])
def add_order_confirmation(self,
                           libbitcoin_client,
                           payout_address,
                           comments=None,
                           shipper=None,
                           tracking_number=None,
                           est_delivery=None,
                           url=None,
                           password=None):
    """
    Add the vendor's order confirmation to the contract.

    For moderated (multisig) orders, only our payout signatures are added to
    the confirmation; for direct payments the payout transaction is signed
    and broadcast immediately. The confirmation is then signed with our guid
    key, merged into the contract, and the contract saved to disk.

    Args:
        libbitcoin_client: blockchain client used to broadcast the payout tx.
        payout_address: the vendor's payout address.
        comments: optional vendor comments.
        shipper/tracking_number/est_delivery: for "physical good" listings.
        url/password: content location for "digital good" listings.

    Raises:
        Exception: if the payout address is invalid for the active network.
    """
    self.blockchain = libbitcoin_client
    # validate the payout address prefix for the active network
    if not self.testnet and not (payout_address[:1] == "1" or payout_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.testnet and not \
            (payout_address[:1] == "n" or payout_address[:1] == "m" or payout_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        bitcointools.b58check_to_hex(payout_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    conf_json = {
        "vendor_order_confirmation": {
            "invoice": {
                # hash of the contract as it stands before this confirmation
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex")
            }
        }
    }
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        shipping = {"shipper": shipper, "tracking_number": tracking_number, "est_delivery": est_delivery}
        conf_json["vendor_order_confirmation"]["invoice"]["shipping"] = shipping
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "digital good":
        content_source = {"url": url, "password": password}
        conf_json["vendor_order_confirmation"]["invoice"]["content_source"] = content_source
    if comments:
        conf_json["vendor_order_confirmation"]["invoice"]["comments"] = comments
    # the order id is the hash of the contract at this point
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    # apply signatures
    outpoints = json.loads(self.db.sales.get_outpoint(order_id))
    if "moderator" in self.contract["buyer_order"]["order"]:
        # moderated payment: attach our multisig payout signatures to the
        # invoice; the buyer completes the tx on his side
        redeem_script = self.contract["buyer_order"]["order"]["payment"]["redeem_script"]
        tx = BitcoinTransaction.make_unsigned(outpoints, payout_address, testnet=self.testnet)
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_v = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        vendor_priv = derive_childkey(masterkey_v, chaincode, bitcointools.MAINNET_PRIVATE)
        sigs = tx.create_signature(vendor_priv, redeem_script)
        conf_json["vendor_order_confirmation"]["invoice"]["payout"] = {}
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["address"] = payout_address
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["value"] = tx.get_out_value()
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"] = sigs
    else:
        # direct payment: we hold the key, so sign and broadcast now
        tx = BitcoinTransaction.make_unsigned(outpoints, payout_address, testnet=self.testnet)
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_v = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        vendor_priv = derive_childkey(masterkey_v, chaincode, bitcointools.MAINNET_PRIVATE)
        tx.sign(vendor_priv)
        tx.broadcast(self.blockchain)
        self.db.transactions.add_transaction(tx.to_raw_tx())
        self.log.info("broadcasting payout tx %s to network" % tx.get_hash())
        self.db.sales.update_payment_tx(order_id, tx.get_hash())
    # sign the invoice's exact 4-space-indented serialization with our guid key
    confirmation = json.dumps(conf_json["vendor_order_confirmation"]["invoice"], indent=4)
    conf_json["vendor_order_confirmation"]["signature"] = \
        base64.b64encode(self.keychain.signing_key.sign(confirmation)[:64])
    self.contract["vendor_order_confirmation"] = conf_json["vendor_order_confirmation"]
    # status 2 == order confirmed
    self.db.sales.update_status(order_id, 2)
    file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
def accept_order_confirmation(self, notification_listener, confirmation_json=None):
    """
    Validate the order confirmation sent over from the vendor and update our node accordingly.

    Args:
        notification_listener: listener used to push an "order confirmation"
            notification to the UI and to send the SMTP notification.
        confirmation_json: optional serialized "vendor_order_confirmation"
            json string; when given it is merged into self.contract before
            validation.

    Returns:
        True on success, otherwise the exception message string describing
        why validation failed (py2 ``e.message``).
    """
    self.notification_listener = notification_listener
    try:
        if confirmation_json:
            self.contract["vendor_order_confirmation"] = json.loads(confirmation_json,
                                                                    object_pairs_hook=OrderedDict)
        # recompute the order id: hash of the contract without the confirmation
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["vendor_order_confirmation"]
        contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
        ref_hash = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
        if ref_hash != contract_hash:
            raise Exception("Order number doesn't match")
        # physical goods must ship with tracking information
        if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
            shipping = self.contract["vendor_order_confirmation"]["invoice"]["shipping"]
            if "tracking_number" not in shipping or "shipper" not in shipping:
                raise Exception("No shipping information")
        # TODO: verify signature
        # TODO: verify payout object
        # status 2/3 mean a confirmation was already handled for this order
        status = self.db.purchases.get_status(contract_hash)
        if status == 2 or status == 3:
            raise Exception("Order confirmation already processed for this contract")
        # update the order status in the db
        self.db.purchases.update_status(contract_hash, 2)
        self.db.purchases.status_changed(contract_hash, 1)
        file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", contract_hash + ".json")
        # update the contract in the file system
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
        title = self.contract["vendor_offer"]["listing"]["item"]["title"]
        if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
            image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
        else:
            image_hash = ""
        # prefer the vendor's human-readable blockchain handle when present
        if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"]:
            handle = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
        else:
            handle = ""
        vendor_guid = self.contract["vendor_offer"]["listing"]["id"]["guid"]
        self.notification_listener.notify(vendor_guid, handle, "order confirmation", contract_hash, title,
                                          image_hash)
        # Send SMTP notification
        notification = SMTPNotification(self.db)
        notification.send("[OpenBazaar] Order Confirmed and Shipped",
                          "You have received an order confirmation.<br><br>"
                          "Order: %s<br>Vendor: %s<br>Title: %s<br>" % (contract_hash, vendor_guid, title))
        return True
    except Exception, e:
        return e.message
def add_receipt(self,
                received,
                libbitcoin_client,
                feedback=None,
                quality=None,
                description=None,
                delivery_time=None,
                customer_service=None,
                review="",
                dispute=False,
                claim=None,
                anonymous=True):
    """
    Add the final piece of the contract that appends the review and payout transaction.

    Args:
        received: whether the buyer received the item (stored in the receipt).
        libbitcoin_client: blockchain client used to broadcast the payout tx.
        feedback, quality, description, delivery_time, customer_service:
            rating fields; the rating object is only built when ALL of them
            are non-None.
        review: free-form review text included in the rating.
        dispute: True when this receipt opens a dispute.
        claim: optional dispute claim text.
        anonymous: when False the buyer's guid and verify key are embedded in
            the rating so the review is publicly attributable, and the
            tx_summary is additionally signed with the guid key.
    """
    self.blockchain = libbitcoin_client
    # hash the contract minus any dispute data to get the reference hash
    contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
    if "dispute" in contract_dict:
        del contract_dict["dispute"]
    if "dispute_resolution" in contract_dict:
        del contract_dict["dispute_resolution"]
    reference_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
    receipt_json = {
        "buyer_receipt": {
            "receipt": {
                "ref_hash": reference_hash,
                "listing": {
                    "received": received,
                    "listing_hash": self.contract["buyer_order"]["order"]["ref_hash"]
                },
                "dispute": {
                    "dispute": dispute
                }
            }
        }
    }
    if "vendor_order_confirmation" in self.contract:
        order_id = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
    else:
        order_id = self.get_order_id()
    # only attach a rating when every rating field was supplied
    if None not in (feedback, quality, description, delivery_time, customer_service):
        address = self.contract["buyer_order"]["order"]["payment"]["address"]
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
        # derive the buyer's per-order child keys from the order chaincode
        buyer_pub = derive_childkey(masterkey_b, chaincode)
        buyer_priv = derive_childkey(bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey),
                                     chaincode, bitcointools.MAINNET_PRIVATE)
        amount = self.contract["buyer_order"]["order"]["payment"]["amount"]
        listing_hash = self.contract["vendor_offer"]["listing"]["contract_id"]
        receipt_json["buyer_receipt"]["receipt"]["rating"] = OrderedDict()
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"] = OrderedDict()
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["feedback"] = feedback
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["quality"] = quality
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["description"] = description
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["delivery_time"] = delivery_time
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["customer_service"] = customer_service
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["review"] = review
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["address"] = address
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["buyer_key"] = buyer_pub
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["amount"] = amount
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["listing"] = listing_hash
        receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["proof_of_tx"] = \
            base64.b64encode(self.db.purchases.get_proof_sig(order_id))
        if not anonymous:
            # identify the buyer so third parties can attribute the rating
            receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["buyer_guid"] = \
                self.keychain.guid.encode("hex")
            receipt_json["buyer_receipt"]["receipt"]["rating"]["tx_summary"]["buyer_guid_key"] = \
                self.keychain.verify_key.encode(encoder=nacl.encoding.HexEncoder)
    status = self.db.purchases.get_status(order_id)
    # moderated (multisig) order that hasn't been completed/disputed yet:
    # combine our signatures with the vendor's and broadcast the payout
    if status < 3 and "moderator" in self.contract["buyer_order"]["order"]:
        outpoints = json.loads(self.db.purchases.get_outpoint(order_id))
        payout_address = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["address"]
        redeem_script = str(self.contract["buyer_order"]["order"]["payment"]["redeem_script"])
        value = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["value"]
        tx = BitcoinTransaction.make_unsigned(outpoints, payout_address,
                                              testnet=self.testnet, out_value=value)
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        buyer_priv = derive_childkey(masterkey_b, chaincode, bitcointools.MAINNET_PRIVATE)
        buyer_signatures = tx.create_signature(buyer_priv, redeem_script)
        # pair the vendor's and buyer's signatures by input index
        signatures = []
        for i in range(len(outpoints)):
            for vendor_sig in self.contract["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"]:
                if vendor_sig["index"] == i:
                    v_signature = vendor_sig["signature"]
            for buyer_sig in buyer_signatures:
                if buyer_sig["index"] == i:
                    b_signature = buyer_sig["signature"]
            signature_obj = {"index": i, "signatures": [b_signature, v_signature]}
            signatures.append(signature_obj)
        receipt_json["buyer_receipt"]["receipt"]["payout"] = {}
        tx.multisign(signatures, redeem_script)
        tx.broadcast(self.blockchain)
        self.db.transactions.add_transaction(tx.to_raw_tx())
        self.log.info("broadcasting payout tx %s to network" % tx.get_hash())
        receipt_json["buyer_receipt"]["receipt"]["payout"]["txid"] = tx.get_hash()
        receipt_json["buyer_receipt"]["receipt"]["payout"]["signature(s)"] = buyer_signatures
        receipt_json["buyer_receipt"]["receipt"]["payout"]["value"] = tx.get_out_value()
    if claim:
        receipt_json["buyer_receipt"]["receipt"]["dispute"]["claim"] = claim
    # sign the receipt with the buyer's guid key (raw 64-byte signature)
    receipt = json.dumps(receipt_json["buyer_receipt"]["receipt"], indent=4)
    receipt_json["buyer_receipt"]["signature"] = \
        base64.b64encode(self.keychain.signing_key.sign(receipt)[:64])
    self.contract["buyer_receipt"] = receipt_json["buyer_receipt"]
    # NOTE(review): buyer_priv is only bound in the rating or moderated-payout
    # branches above; a "rating" key implies the rating branch ran, so it is
    # defined here — confirm no other code path injects a rating.
    if "rating" in self.contract["buyer_receipt"]["receipt"]:
        self.contract["buyer_receipt"]["receipt"]["rating"]["signature"] = \
            bitcointools.encode_sig(*bitcointools.ecdsa_raw_sign(json.dumps(
                self.contract["buyer_receipt"]["receipt"]["rating"]["tx_summary"], indent=4), buyer_priv))
        if not anonymous:
            self.contract["buyer_receipt"]["receipt"]["rating"]["guid_signature"] = \
                base64.b64encode(self.keychain.signing_key.sign(json.dumps(
                    self.contract["buyer_receipt"]["receipt"]["rating"]["tx_summary"], indent=4))[:64])
    if status < 3:
        # order completed: mark it and move the file to "trade receipts"
        self.db.purchases.update_status(order_id, 3)
        file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", order_id + ".json")
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
        file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")
        if os.path.exists(file_path):
            os.remove(file_path)
    else:
        # already completed/disputed: just (re)write the trade receipt
        file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", order_id + ".json")
        with open(file_path, 'wb') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
def accept_receipt(self, notification_listener, blockchain, receipt_json=None):
    """
    Process the final receipt sent over by the buyer. If valid, broadcast the transaction
    to the bitcoin network.

    Args:
        notification_listener: listener used to push a "rating received" notification.
        blockchain: libbitcoin client used to broadcast the multisig payout tx.
        receipt_json: optional serialized "buyer_receipt" json string to merge
            into self.contract before validation.

    Returns:
        The order id string on success.

    Raises:
        Exception: if a receipt was already processed, the reference hash
            doesn't match, or the order isn't in an accepting state.
    """
    self.notification_listener = notification_listener
    self.blockchain = blockchain
    if "buyer_receipt" in self.contract:
        raise Exception("A receipt has already been processed for this order")
    if receipt_json:
        self.contract["buyer_receipt"] = json.loads(receipt_json,
                                                    object_pairs_hook=OrderedDict)
    # recompute the contract hash without the receipt and any dispute data
    contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
    del contract_dict["buyer_receipt"]
    if "dispute" in contract_dict:
        del contract_dict["dispute"]
    if "dispute_resolution" in contract_dict:
        del contract_dict["dispute_resolution"]
    contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
    ref_hash = self.contract["buyer_receipt"]["receipt"]["ref_hash"]
    if ref_hash != contract_hash:
        raise Exception("Order number doesn't match")
    # TODO: verify buyer signature
    if "vendor_order_confirmation" in self.contract:
        order_id = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
    else:
        order_id = self.get_order_id()
    # status 2 = confirmation sent; 5/6 presumably dispute-finalized states — confirm
    status = self.db.sales.get_status(order_id)
    if status not in (2, 5, 6):
        raise Exception("Can only process a receipt after an order confirmation "
                        "is sent or a dispute is finalized")
    title = self.contract["vendor_offer"]["listing"]["item"]["title"]
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
        image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
    else:
        image_hash = ""
    buyer_guid = unhexlify(self.contract["buyer_order"]["order"]["id"]["guid"])
    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"]:
        handle = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
    else:
        handle = ""
    # moderated payment that wasn't disputed: combine vendor + buyer
    # signatures and broadcast the payout transaction ourselves
    if "moderator" in self.contract["buyer_order"]["order"] and status not in (5, 6):
        outpoints = json.loads(self.db.sales.get_outpoint(order_id))
        payout_address = str(self.contract["vendor_order_confirmation"]["invoice"]["payout"]["address"])
        redeem_script = str(self.contract["buyer_order"]["order"]["payment"]["redeem_script"])
        value = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["value"]
        tx = BitcoinTransaction.make_unsigned(outpoints, payout_address,
                                              testnet=self.testnet, out_value=value)
        vendor_sigs = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"]
        buyer_sigs = self.contract["buyer_receipt"]["receipt"]["payout"]["signature(s)"]
        # pair the signatures by input index
        signatures = []
        for i in range(len(outpoints)):
            for vendor_sig in vendor_sigs:
                if vendor_sig["index"] == i:
                    v_signature = vendor_sig["signature"]
            for buyer_sig in buyer_sigs:
                if buyer_sig["index"] == i:
                    b_signature = buyer_sig["signature"]
            signature_obj = {"index": i, "signatures": [b_signature, v_signature]}
            signatures.append(signature_obj)
        tx.multisign(signatures, redeem_script)
        tx.broadcast(self.blockchain)
        self.db.transactions.add_transaction(tx.to_raw_tx())
        self.log.info("broadcasting payout tx %s to network" % tx.get_hash())
        self.db.sales.update_payment_tx(order_id, tx.get_hash())
    self.notification_listener.notify(buyer_guid, handle, "rating received", order_id, title, image_hash)
    notification_rater = handle if handle else buyer_guid.encode('hex')
    notification = SMTPNotification(self.db)
    notification.send("[OpenBazaar] New Rating Received",
                      "You received a new rating from %s for Order #%s - \"%s\". " % (notification_rater,
                                                                                      order_id,
                                                                                      title))
    # persist the rating, if the buyer attached one
    if "rating" in self.contract["buyer_receipt"]["receipt"]:
        self.db.ratings.add_rating(self.contract["buyer_receipt"]["receipt"]
                                   ["rating"]["tx_summary"]["listing"],
                                   json.dumps(self.contract["buyer_receipt"]["receipt"]["rating"], indent=4))
    if status == 2:
        self.db.sales.status_changed(order_id, 1)
        self.db.sales.update_status(order_id, 3)
    # move the contract from "in progress" to "trade receipts"
    file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", order_id + ".json")
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
    if os.path.exists(file_path):
        os.remove(file_path)
    return order_id
def await_funding(self, notification_listener, libbitcoin_client, proofSig, is_purchase=True):
    """
    Saves the contract to the file system and db as an unfunded contract.
    Listens on the libbitcoin server for the multisig address to be funded.

    Args:
        notification_listener: listener used later by the funding callbacks.
        libbitcoin_client: blockchain client used to subscribe to the
            payment address.
        proofSig: stored with the purchase row — presumably the vendor's
            signed proof for this order; confirm against the caller.
        is_purchase: True on the buyer node (records a purchase), False on
            the vendor node (records a sale and emails the vendor).
    """
    self.notification_listener = notification_listener
    self.blockchain = libbitcoin_client
    self.is_purchase = is_purchase
    # the order id is the hash of the full contract at this point
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    payment_address = self.contract["buyer_order"]["order"]["payment"]["address"]
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    if "image_hashes" in vendor_item:
        thumbnail_hash = vendor_item["image_hashes"][0]
    else:
        thumbnail_hash = ""
    # prefer human-readable blockchain handles over raw guids when set
    if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"] \
            and self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] != "":
        vendor = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
    else:
        vendor = self.contract["vendor_offer"]["listing"]["id"]["guid"]
    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"] \
            and self.contract["buyer_order"]["order"]["id"]["blockchain_id"] != "":
        buyer = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
    else:
        buyer = self.contract["buyer_order"]["order"]["id"]["guid"]
    if is_purchase:
        file_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", order_id + ".json")
        # status 0 = unfunded
        self.db.purchases.new_purchase(order_id,
                                       self.contract["vendor_offer"]["listing"]["item"]["title"],
                                       self.contract["vendor_offer"]["listing"]["item"]["description"],
                                       time.time(),
                                       self.contract["buyer_order"]["order"]["payment"]["amount"],
                                       payment_address,
                                       0,
                                       thumbnail_hash,
                                       vendor,
                                       proofSig,
                                       self.contract["vendor_offer"]["listing"]["metadata"]["category"])
    else:
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", order_id + ".json")
        title = self.contract["vendor_offer"]["listing"]["item"]["title"]
        description = self.contract["vendor_offer"]["listing"]["item"]["description"]
        self.db.sales.new_sale(order_id,
                               title,
                               description,
                               time.time(),
                               self.contract["buyer_order"]["order"]["payment"]["amount"],
                               payment_address,
                               0,
                               thumbnail_hash,
                               buyer,
                               self.contract["vendor_offer"]["listing"]["metadata"]["category"])
        # email is best-effort only: a broken SMTP config must not block the order
        try:
            notification = SMTPNotification(self.db)
            notification.send("[OpenBazaar] Order Received", "Order #%s<br>"
                                                             "Buyer: %s<br>"
                                                             "BTC Address: %s<br>"
                                                             "Title: %s<br>"
                                                             "Description: %s<br>"
                              % (order_id, buyer, payment_address, title, description))
        except Exception as e:
            self.log.info("Error with SMTP notification: %s" % e.message)
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # watch the payment address; on_tx_received fires for each incoming tx
    self.blockchain.subscribe_address(str(payment_address), notification_cb=self.on_tx_received)
def on_tx_received(self, address_version, address_hash, height, block_hash, tx):
    """
    Fire when the libbitcoin server tells us we received a payment to this funding address.
    While unlikely, a user may send multiple transactions to the funding address to reach the
    funding level. We need to keep a running balance and increment it when a new transaction
    is received. If the contract is fully funded, we push a notification to the websockets.

    Args:
        address_version, address_hash, height, block_hash: subscription
            metadata from the libbitcoin server (unused here).
        tx: the serialized transaction that touched the funding address.
    """
    try:
        # decode the transaction
        self.log.info("Bitcoin transaction detected")
        transaction = BitcoinTransaction.from_serialized(tx, self.testnet)
        # get the amount (in satoshi) the user is expected to pay
        amount_to_pay = int(float(self.contract["buyer_order"]["order"]["payment"]["amount"]) * 100000000)
        if tx not in self.received_txs:  # make sure we aren't parsing the same tx twice.
            outpoints = transaction.check_for_funding(
                self.contract["buyer_order"]["order"]["payment"]["address"])
            if outpoints is not None:
                for outpoint in outpoints:
                    self.amount_funded += outpoint["value"]
                    self.outpoints.append(outpoint)
                # BUG FIX: record the tx once per transaction, not once per
                # outpoint as before — a multi-outpoint funding tx used to be
                # appended to received_txs multiple times.
                if outpoints:
                    self.received_txs.append(tx)
            if self.amount_funded >= amount_to_pay:  # if fully funded
                self.payment_received()
            else:
                # partially funded: tell the UI how much has arrived so far
                order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
                notification_json = {
                    "notification": {
                        "type": "partial payment",
                        "amount_funded": round(self.amount_funded / float(100000000), 8),
                        "order_id": order_id
                    }
                }
                self.notification_listener.push_ws(notification_json)
    except Exception as e:
        self.log.critical("Error processing bitcoin transaction: %s" % e.message)
def payment_received(self):
    """
    Handle the funding address reaching full payment: unsubscribe from the
    address, notify the user (websocket + SMTP), update the db, and move the
    contract file from "unfunded" to "in progress".
    """
    self.blockchain.unsubscribe_address(
        self.contract["buyer_order"]["order"]["payment"]["address"], self.on_tx_received)
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    title = self.contract["vendor_offer"]["listing"]["item"]["title"]
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
        image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
    else:
        image_hash = ""
    if self.is_purchase:
        # buyer side: our payment to the vendor confirmed
        unfunded_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", order_id + ".json")
        in_progress_path = os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")
        if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"]:
            handle = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
        else:
            handle = ""
        vendor_guid = self.contract["vendor_offer"]["listing"]["id"]["guid"]
        self.notification_listener.notify(unhexlify(vendor_guid), handle, "payment received",
                                          order_id, title, image_hash)
        notification = SMTPNotification(self.db)
        notification.send("[OpenBazaar] Purchase Payment Received", "Your payment was received.<br><br>"
                                                                    "Order: %s<br>"
                                                                    "Vendor: %s<br>"
                                                                    "Title: %s"
                          % (order_id, vendor_guid, title))
        # update the db (0 = unfunded, 1 = funded)
        if self.db.purchases.get_status(order_id) == 0:
            self.db.purchases.update_status(order_id, 1)
        self.db.purchases.update_outpoint(order_id, json.dumps(self.outpoints))
        self.log.info("Payment for order id %s successfully broadcast to network." % order_id)
    else:
        # vendor side: a new funded order just came in
        unfunded_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", order_id + ".json")
        in_progress_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", order_id + ".json")
        buyer_guid = self.contract["buyer_order"]["order"]["id"]["guid"]
        if "blockchain_id" in self.contract["buyer_order"]["order"]["id"]:
            handle = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
        else:
            handle = ""
        self.notification_listener.notify(unhexlify(buyer_guid), handle, "new order", order_id,
                                          title, image_hash)
        notification = SMTPNotification(self.db)
        notification.send("[OpenBazaar] Payment for Order Received", "Payment was received for Order #%s."
                          % order_id)
        self.db.sales.update_status(order_id, 1)
        self.db.sales.status_changed(order_id, 1)
        self.db.sales.update_outpoint(order_id, json.dumps(self.outpoints))
        self.log.info("Received new order %s" % order_id)
    os.rename(unfunded_path, in_progress_path)
def get_contract_id(self):
    """Return this contract's id (the hash stored in the vendor offer listing)."""
    listing = self.contract["vendor_offer"]["listing"]
    return listing["contract_id"]
def get_order_id(self):
    """
    Return the order id: the hex-encoded hash of the contract with every
    post-order section (confirmation, receipt, dispute data) stripped out.
    """
    contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
    removable = ("vendor_order_confirmation", "buyer_receipt", "dispute", "dispute_resolution")
    for section in removable:
        contract_dict.pop(section, None)
    return digest(json.dumps(contract_dict, indent=4)).encode("hex")
def check_expired(self):
    """
    Return True if this listing's expiry timestamp is in the past.

    The expiry string is either the literal "never" or a timestamp whose
    last four characters (presumably a timezone suffix — confirm) are
    stripped before parsing with the '%Y-%m-%dT%H:%M' format.
    """
    expiry = self.contract["vendor_offer"]["listing"]["metadata"]["expiry"]
    if expiry == "never":
        return False
    expiry_dt = datetime.strptime(expiry[:len(expiry) - 4], '%Y-%m-%dT%H:%M')
    return expiry_dt < datetime.utcnow()
def delete(self, delete_images=False):
    """
    Deletes the contract json from the OpenBazaar directory as well as the listing
    metadata from the db and all the related images in the file system.

    Args:
        delete_images: when True, also remove each listing image from disk
            and drop its filemap entry.
    """
    contract_id = self.contract["vendor_offer"]["listing"]["contract_id"]
    # resolve the contract's file path before we start removing filemap rows
    file_path = self.db.filemap.get_file(contract_id)
    listing_item = self.contract["vendor_offer"]["listing"]["item"]
    if delete_images and "image_hashes" in listing_item:
        for image_hash in listing_item["image_hashes"]:
            # delete the image from disk, then forget its filemap pointer
            image_path = self.db.filemap.get_file(image_hash)
            if os.path.exists(image_path):
                os.remove(image_path)
            self.db.filemap.delete(image_hash)
    # delete the contract file itself
    if os.path.exists(file_path):
        os.remove(file_path)
    # drop the listing metadata and the contract's filemap pointer
    contract_hash = unhexlify(contract_id)
    self.db.listings.delete_listing(contract_hash)
    self.db.filemap.delete(contract_hash.encode("hex"))
def save(self):
    """
    Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
    It uses the title as the file name so it's easy on human eyes. A mapping of the
    hash of the contract and file path is stored in the database so we can retrieve
    the contract with only its hash.
    Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
    is saved in the db for fast access.
    """
    # get the contract title to use as the file name and format it
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    file_name += str(self.contract["vendor_offer"]["listing"]["contract_id"])[:8]
    # save the json contract to the file system
    file_path = os.path.join(DATA_FOLDER, "store", "contracts", "listings", file_name + ".json")
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # if the title changed, remove the stale file saved under the old title
    if self.previous_title and self.previous_title != self.contract["vendor_offer"]["listing"]["item"]["title"]:
        if isinstance(self.previous_title, unicode):
            self.previous_title = self.previous_title.encode('utf8')
        old_name = str(self.previous_title[:100])
        # BUG FIX: these substitutions previously operated on file_name, so
        # old_name never matched the stale file and it was never deleted.
        old_name = re.sub(r"[^\w\s]", '', old_name)
        old_name = re.sub(r"\s+", '_', old_name)
        old_name += str(self.contract["vendor_offer"]["listing"]["contract_id"])[:8]
        old_path = os.path.join(DATA_FOLDER, "store", "contracts", "listings", old_name + ".json")
        if os.path.exists(old_path):
            os.remove(old_path)
    # Create a `ListingMetadata` protobuf object using data from the full contract
    listings = Listings()
    data = listings.ListingMetadata()
    data.contract_hash = unhexlify(self.contract["vendor_offer"]["listing"]["contract_id"])
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    data.title = vendor_item["title"]
    if "image_hashes" in vendor_item:
        data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
    if "category" in vendor_item:
        data.category = vendor_item["category"]
    # prices are either denominated directly in BTC or in a fiat currency
    if "bitcoin" not in vendor_item["price_per_unit"]:
        data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
        data.currency_code = vendor_item["price_per_unit"]["fiat"][
            "currency_code"]
    else:
        data.price = round(float(vendor_item["price_per_unit"]["bitcoin"]), 8)
        data.currency_code = "BTC"
    data.nsfw = vendor_item["nsfw"]
    if "shipping" not in self.contract["vendor_offer"]["listing"]:
        data.origin = CountryCode.Value("NA")
    else:
        data.origin = CountryCode.Value(
            self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
        for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
            data.ships_to.append(CountryCode.Value(region.upper()))
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"].lower() == "physical good":
        data.contract_type = listings.PHYSICAL_GOOD
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"].lower() == "digital good":
        data.contract_type = listings.DIGITAL_GOOD
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"].lower() == "service":
        data.contract_type = listings.SERVICE
    data.last_modified = int(time.time())
    # save the mapping of the contract file path and contract hash in the database
    self.db.filemap.insert(data.contract_hash.encode("hex"), file_path[len(DATA_FOLDER):])
    # save the `ListingMetadata` protobuf to the database as well
    self.db.listings.add_listing(data)
def process_refund(self, refund_json, blockchain, notification_listener):
    """
    Process a refund message sent over by the vendor.

    When the refund message carries a "txid" the vendor already broadcast a
    refund himself and we only record it.  When it does not, the message
    carries the vendor's multisig signature(s): we co-sign with the buyer's
    key and broadcast the refund transaction ourselves.

    Args:
        refund_json: dict containing the vendor's "refund" object.
        blockchain: libbitcoin client used to broadcast the refund tx.
        notification_listener: listener used to push a "refund" notification.

    Raises:
        Exception: if a refund was already processed for this order.
    """
    if "refund" in self.contract:
        raise Exception("Refund already processed for this order")
    self.contract["refund"] = refund_json["refund"]
    order_id = refund_json["refund"]["order_id"]
    if "txid" not in refund_json["refund"]:
        # no txid: rebuild the multisig refund tx and add our signatures
        outpoints = json.loads(self.db.purchases.get_outpoint(order_id))
        refund_address = self.contract["buyer_order"]["order"]["refund_address"]
        redeem_script = self.contract["buyer_order"]["order"]["payment"]["redeem_script"]
        in_value = 0
        for outpoint in outpoints:
            in_value += outpoint["value"]
        # the refund pays the full input value minus the agreed refund tx fee
        out_value = in_value - long(self.contract["buyer_order"]["order"]["payment"]["refund_tx_fee"])
        tx = BitcoinTransaction.make_unsigned(outpoints, refund_address,
                                              testnet=self.testnet,
                                              out_value=out_value)
        # derive the buyer's per-order child key and sign
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = bitcointools.bip32_extract_key(KeyChain(self.db).bitcoin_master_privkey)
        buyer_priv = derive_childkey(masterkey_b, chaincode, bitcointools.MAINNET_PRIVATE)
        buyer_sigs = tx.create_signature(buyer_priv, redeem_script)
        vendor_sigs = refund_json["refund"]["signature(s)"]
        # pair the vendor's and buyer's signatures by input index
        signatures = []
        for i in range(len(outpoints)):
            for vendor_sig in vendor_sigs:
                if vendor_sig["index"] == i:
                    v_signature = vendor_sig["signature"]
            for buyer_sig in buyer_sigs:
                if buyer_sig["index"] == i:
                    b_signature = buyer_sig["signature"]
            signature_obj = {"index": i, "signatures": [b_signature, v_signature]}
            signatures.append(signature_obj)
        tx.multisign(signatures, redeem_script)
        tx.broadcast(blockchain)
        self.db.transactions.add_transaction(tx.to_raw_tx())
        self.log.info("broadcasting refund tx %s to network" % tx.get_hash())
    # mark the purchase refunded (status 7) and move the contract file
    self.db.purchases.update_status(order_id, 7)
    self.db.purchases.status_changed(order_id, 1)
    file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", order_id + ".json")
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", order_id + ".json")
    if os.path.exists(file_path):
        os.remove(file_path)
    title = self.contract["vendor_offer"]["listing"]["item"]["title"]
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
        image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
    else:
        image_hash = ""
    # NOTE(review): guid is passed as a hex string here, while
    # payment_received() passes raw bytes — confirm the listener accepts both.
    buyer_guid = self.contract["buyer_order"]["order"]["id"]["guid"]
    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"]:
        handle = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
    else:
        handle = ""
    notification_listener.notify(buyer_guid, handle, "refund", order_id, title, image_hash)
    notification = SMTPNotification(self.db)
    notification.send("[OpenBazaar] Refund Received", "You received a refund.<br><br>"
                                                      "Order: %s<br>Title: %s"
                      % (order_id, title))
def verify(self, sender_key):
"""
Validate that an order sent over by a buyer is filled out correctly.
"""
SelectParams("testnet" if self.testnet else "mainnet")
try:
contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
del contract_dict["buyer_order"]
contract_hash = digest(json.dumps(contract_dict, indent=4))
ref_hash = unhexlify(self.contract["buyer_order"]["order"]["ref_hash"])
contract_id = self.contract["vendor_offer"]["listing"]["contract_id"]
# verify that the reference hash matches the contract and that the contract actually exists
if contract_hash != ref_hash or not self.db.filemap.get_file(contract_id):
raise Exception("Order for contract that doesn't exist")
# verify the vendor's own signature
verify_key = self.keychain.signing_key.verify_key
verify_key.verify(json.dumps(self.contract["vendor_offer"]["listing"], indent=4),
base64.b64decode(self.contract["vendor_offer"]["signatures"]["guid"]))
# verify timestamp is within a reasonable time from now
timestamp = self.contract["buyer_order"]["order"]["date"]
dt = datetime.strptime(timestamp[:len(timestamp)-4], "%Y-%m-%d %H:%M:%S.%f")
if abs((datetime.utcnow() - dt).total_seconds()) > 600:
raise Exception("Timestamp on order not within 10 minutes of now")
# verify the signatures on the order
verify_obj = json.dumps(self.contract["buyer_order"]["order"], indent=4)
verify_key = nacl.signing.VerifyKey(sender_key)
verify_key.verify(verify_obj, base64.b64decode(self.contract["buyer_order"]["signatures"]["guid"]))
bitcoin_key = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
bitcoin_sig = self.contract["buyer_order"]["signatures"]["bitcoin"]
valid = bitcointools.ecdsa_raw_verify(verify_obj, bitcointools.decode_sig(bitcoin_sig), bitcoin_key)
if not valid:
raise Exception("Invalid Bitcoin signature")
# verify buyer included the correct bitcoin amount for payment
quantity = int(self.contract["buyer_order"]["order"]["quantity"])
price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
if "bitcoin" in price_json:
asking_price = float(price_json["bitcoin"]) * quantity
else:
currency_code = price_json["fiat"]["currency_code"]
fiat_price = price_json["fiat"]["price"]
conversion_rate = BtcPrice.instance().get(currency_code.upper())
asking_price = float("{0:.8f}".format(float(fiat_price) / float(conversion_rate))) * quantity
if "shipping" in self.contract["vendor_offer"]["listing"]:
if not self.contract["vendor_offer"]["listing"]["shipping"]["free"]:
shipping_origin = self.contract["vendor_offer"]["listing"]["shipping"][
"shipping_origin"].upper()
if shipping_origin == self.contract["buyer_order"]["order"]["shipping"]["country"].upper():
if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"][
"flat_fee"]["bitcoin"]["domestic"]) * quantity
else:
price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
"price"]["domestic"]
currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
"fiat"]["currency_code"]
conversion_rate = BtcPrice.instance().get(currency.upper(), False)
shipping_amount = float("{0:.8f}".format(float(price) /
float(conversion_rate))) * quantity
else:
if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"][
"flat_fee"]["bitcoin"]["international"]) * quantity
else:
price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
"price"]["international"]
currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
"fiat"]["currency_code"]
conversion_rate = BtcPrice.instance().get(currency.upper(), False)
shipping_amount = float("{0:.8f}".format(float(price) /
float(conversion_rate))) * quantity
asking_price += shipping_amount
print round(float(asking_price), 8), float(self.contract["buyer_order"]["order"]["payment"]["amount"])
if round(float(asking_price), 8) > float(self.contract["buyer_order"]["order"]["payment"]["amount"]):
raise Exception("Insuffient Payment")
if "moderator" in self.contract["buyer_order"]["order"]:
# verify a valid moderator was selected
valid_mod = False
for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
valid_mod = True
if not valid_mod:
raise Exception("Invalid moderator")
# verify redeem script
chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
masterkey_v = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
buyer_key = unhexlify(derive_childkey(masterkey_b, chaincode))
vendor_key = unhexlify(derive_childkey(masterkey_v, chaincode))
moderator_key = unhexlify(derive_childkey(masterkey_m, chaincode))
redeem_script = CScript([OP_2, buyer_key, vendor_key, moderator_key, OP_3, OP_CHECKMULTISIG])
if redeem_script.encode("hex") != self.contract["buyer_order"]["order"]["payment"]["redeem_script"]:
raise Exception("Invalid redeem script")
# verify the multisig payment address
payment_address = str(P2SHBitcoinAddress.from_redeemScript(redeem_script))
if payment_address != self.contract["buyer_order"]["order"]["payment"]["address"]:
raise Exception("Incorrect payment address")
else:
# verify the direct payment address
chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
masterkey_v = bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
vendor_key = unhexlify(derive_childkey(masterkey_v, chaincode))
# verify the payment address
payment_address = str(P2PKHBitcoinAddress.from_pubkey(vendor_key))
if payment_address != self.contract["buyer_order"]["order"]["payment"]["address"]:
raise Exception("Incorrect payment address")
# verify all the shipping fields exist
if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
shipping = self.contract["buyer_order"]["order"]["shipping"]
keys = ["ship_to", "address", "postal_code", "city", "state", "country"]
for value in map(shipping.get, keys):
if value is None:
raise Exception("Missing shipping field")
# verify buyer ID
pubkeys = self.contract["buyer_order"]["order"]["id"]["pubkeys"]
keys = ["guid", "bitcoin"]
for value in map(pubkeys.get, keys):
if value is None:
raise Exception("Missing pubkey field")
return True
except Exception, e:
return e.message
def validate_for_moderation(self, proof_sig):
    """
    Validate this disputed contract on behalf of a moderator.

    Re-derives the order reference hash, checks every signature (vendor
    offer, buyer order, the vendor's optional order-acceptance proof, and
    the optional order confirmation), rebuilds the 2-of-3 multisig redeem
    script and payment address from the contract keys, and checks that the
    moderation fee in the contract is not below our current fee.

    Args:
        proof_sig: the vendor's order-acceptance signature when the buyer
            filed the claim, otherwise None.

    Returns:
        A list of human-readable validation failure strings; empty when the
        contract validates cleanly.
    """
    validation_failures = []
    # Hash the contract as it looked when the order was placed: strip
    # everything that was appended afterwards before hashing.
    tmp_contract = deepcopy(self.contract)
    if "buyer_order" in tmp_contract:
        del tmp_contract["buyer_order"]
    if "vendor_order_confirmation" in tmp_contract:
        del tmp_contract["vendor_order_confirmation"]
    if "buyer_receipt" in tmp_contract:
        del tmp_contract["buyer_receipt"]
    del tmp_contract["dispute"]
    contract_hash = digest(json.dumps(tmp_contract, indent=4))
    ref_hash = unhexlify(self.contract["buyer_order"]["order"]["ref_hash"])
    listing = json.dumps(self.contract["vendor_offer"]["listing"], indent=4)
    # verify that the reference hash matches the contract
    if contract_hash != ref_hash:
        validation_failures.append("Reference hash in buyer_order doesn't match the listing hash;")
    # validate the signatures on vendor_offer
    vendor_guid_signature = self.contract["vendor_offer"]["signatures"]["guid"]
    vendor_bitcoin_signature = self.contract["vendor_offer"]["signatures"]["bitcoin"]
    vendor_guid_pubkey = unhexlify(self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"])
    vendor_bitcoin_pubkey = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
    verify_key = nacl.signing.VerifyKey(vendor_guid_pubkey)
    try:
        verify_key.verify(listing, base64.b64decode(vendor_guid_signature))
    except Exception:
        validation_failures.append("Guid signature in vendor_offer not valid;")
    valid = bitcointools.ecdsa_raw_verify(listing,
                                          bitcointools.decode_sig(vendor_bitcoin_signature),
                                          vendor_bitcoin_pubkey)
    if not valid:
        validation_failures.append("Bitcoin signature in vendor_offer is not valid;")
    # verify the signatures on the order
    order = json.dumps(self.contract["buyer_order"]["order"], indent=4)
    buyer_guid_signature = self.contract["buyer_order"]["signatures"]["guid"]
    buyer_bitcoin_signature = self.contract["buyer_order"]["signatures"]["bitcoin"]
    buyer_bitcoin_pubkey = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
    buyer_guid_pubkey = unhexlify(self.contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"])
    verify_key = nacl.signing.VerifyKey(buyer_guid_pubkey)
    try:
        verify_key.verify(order, base64.b64decode(buyer_guid_signature))
    except Exception:
        validation_failures.append("Guid signature in buyer_order not valid;")
    valid = bitcointools.ecdsa_raw_verify(order, bitcointools.decode_sig(buyer_bitcoin_signature),
                                          buyer_bitcoin_pubkey)
    if not valid:
        validation_failures.append("Bitcoin signature in buyer_order not valid;")
    # If the buyer filed this claim, check the vendor's signature to show he accepted the order.
    if proof_sig is not None:
        address = self.contract["buyer_order"]["order"]["payment"]["address"]
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = derive_childkey(masterkey_b, chaincode)
        amount = self.contract["buyer_order"]["order"]["payment"]["amount"]
        listing_hash = self.contract["vendor_offer"]["listing"]["contract_id"]
        verify_key = nacl.signing.VerifyKey(vendor_guid_pubkey)
        try:
            # The acceptance proof covers the concatenation of these fields.
            verify_key.verify(str(address) + str(amount) + str(listing_hash) + str(buyer_key),
                              base64.b64decode(proof_sig))
        except Exception:
            validation_failures.append("Vendor's order-acceptance signature not valid;")
    # verify redeem script
    chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
    for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
        if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
            masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
            # We (the moderator) must hold the key named in the contract.
            if masterkey_m != bitcointools.bip32_extract_key(self.keychain.bitcoin_master_pubkey):
                validation_failures.append("Moderator Bitcoin key doesn't match key in vendor_order;")
    masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
    masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
    buyer_key = derive_childkey(masterkey_b, chaincode)
    vendor_key = derive_childkey(masterkey_v, chaincode)
    moderator_key = derive_childkey(masterkey_m, chaincode)
    redeem_script = bitcointools.mk_multisig_script([buyer_key, vendor_key, moderator_key], 2)
    if redeem_script != self.contract["buyer_order"]["order"]["payment"]["redeem_script"]:
        validation_failures.append("Bitcoin redeem script not valid for the keys in this contract;")
    # verify address from redeem script (196 is the testnet P2SH version byte)
    if self.testnet:
        payment_address = bitcointools.p2sh_scriptaddr(redeem_script, 196)
    else:
        payment_address = bitcointools.p2sh_scriptaddr(redeem_script)
    if self.contract["buyer_order"]["order"]["payment"]["address"] != payment_address:
        # FIX: corrected "reddem" -> "redeem" in the failure message.
        validation_failures.append("Bitcoin address invalid. Cannot be derived from redeem script;")
    # validate vendor_order_confirmation
    if "vendor_order_confirmation" in self.contract:
        # Round-trip through json to get an order-preserving copy.
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["vendor_order_confirmation"]
        if "buyer_receipt" in contract_dict:
            del contract_dict["buyer_receipt"]
        contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
        ref_hash = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
        if ref_hash != contract_hash:
            validation_failures.append("Reference hash in vendor_order_confirmation does not match order ID;")
        vendor_signature = self.contract["vendor_order_confirmation"]["signature"]
        confirmation = json.dumps(self.contract["vendor_order_confirmation"]["invoice"], indent=4)
        verify_key = nacl.signing.VerifyKey(vendor_guid_pubkey)
        try:
            verify_key.verify(confirmation, base64.b64decode(vendor_signature))
        except Exception:
            validation_failures.append("Vendor's signature in vendor_order_confirmation not valid;")
    # check the moderator fee is correct
    own_guid = self.keychain.guid.encode("hex")
    for moderator in self.contract["vendor_offer"]["listing"]["moderators"]:
        if moderator["guid"] == own_guid:
            # strip the trailing character (presumably a '%' suffix — TODO confirm fee format)
            fee = float(moderator["fee"][:-1])
            if Profile(self.db).get().moderation_fee < fee:
                validation_failures.append("Moderator fee in contract less than current moderation fee;")
    return validation_failures
def __repr__(self):
return json.dumps(self.contract, indent=4)
def check_unfunded_for_payment(db, libbitcoin_client, notification_listener, testnet=False):
    """
    Sweep the unfunded contracts in our database and query the libbitcoin
    server to see if each one has received a payment. Only orders created
    within the last 24 hours (86400 seconds) are checked.
    """
    now = time.time()
    # Purchases first, then sales — each row is (order_id, timestamp, ...).
    rows = list(db.purchases.get_unfunded()) + list(db.sales.get_unfunded())
    for row in rows:
        if now - row[1] <= 86400:
            check_order_for_payment(row[0], db, libbitcoin_client, notification_listener, testnet)
def check_order_for_payment(order_id, db, libbitcoin_client, notification_listener, testnet=False):
    """
    Check a single unfunded order for payment by fetching the payment
    address's history from the libbitcoin server. If the address has
    received at least the contracted amount, the contract's
    payment_received handler is triggered.

    Best-effort: any failure (missing file, malformed contract, server
    error) is swallowed so one bad order cannot abort the sweep.
    """
    try:
        purchase_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", order_id + ".json")
        sale_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", order_id + ".json")
        if os.path.exists(purchase_path):
            file_path = purchase_path
            is_purchase = True
        elif os.path.exists(sale_path):
            file_path = sale_path
            is_purchase = False
        else:
            # FIX: previously fell through with file_path/is_purchase unbound,
            # raising a NameError that the bare except silently swallowed.
            return
        with open(file_path, 'r') as contract_file:
            order = json.load(contract_file, object_pairs_hook=OrderedDict)
        c = Contract(db, contract=order, testnet=testnet)
        c.blockchain = libbitcoin_client
        c.notification_listener = notification_listener
        c.is_purchase = is_purchase
        addr = c.contract["buyer_order"]["order"]["payment"]["address"]
        SelectParams("testnet" if testnet else "mainnet")
        script_pubkey = CBitcoinAddress(addr).to_scriptPubKey().encode("hex")

        def history_fetched(ec, history):
            # Callback from libbitcoin: sum the value of the address's
            # history rows and fire payment_received once fully funded.
            if not ec:
                amount_funded = 0
                outpoints = []
                for objid, txhash, index, height, value in history:  # pylint: disable=W0612
                    amount_funded += value
                    outpoints.append({
                        "txid": txhash.encode("hex"),
                        "vout": index,
                        "value": value,
                        "scriptPubKey": script_pubkey
                    })
                # the amount (in satoshi) the user is expected to pay
                amount_to_pay = int(float(c.contract["buyer_order"]["order"]["payment"]["amount"]) * 100000000)
                if amount_funded >= amount_to_pay:
                    c.outpoints = outpoints
                    c.payment_received()

        libbitcoin_client.fetch_history2(addr, history_fetched)
    except Exception:
        # Deliberate best-effort swallow; see docstring.
        pass
| {
"repo_name": "tyler-smith/OpenBazaar-Server",
"path": "market/contracts.py",
"copies": "1",
"size": "81381",
"license": "mit",
"hash": 617496222747076400,
"line_mean": 54.2860054348,
"line_max": 116,
"alpha_frac": 0.5600078642,
"autogenerated": false,
"ratio": 4.3051896524361215,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5365197516636122,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
from binascii import unhexlify, hexlify
from collections import OrderedDict
import re
import os
from protos.objects import Listings
from protos.countries import CountryCode
from dht.utils import digest
from constants import DATA_FOLDER
from db.datastore import HashMap, ListingsStore
from market.profile import Profile
from keyutils.keys import KeyChain
class Contract(object):
"""
A class for creating and interacting with OpenBazaar Ricardian contracts.
"""
def __init__(self, contract=None, hash_value=None):
"""
This class can be instantiated with either an `OrderedDict` or a hash
of a contract. If a hash is used, we will load the contract from either
the file system or cache.
Alternatively, pass in no parameters if the intent is to create a new
contract.
Args:
contract: an `OrderedDict` containing a filled out json contract
hash: a hash (in raw bytes) of a contract
"""
if contract is not None:
self.contract = contract
elif hash_value is not None:
try:
file_path = HashMap().get_file(hash_value)
if file_path is None:
file_path = DATA_FOLDER + "cache/" + hexlify(hash_value)
with open(file_path, 'r') as filename:
self.contract = json.load(filename, object_pairs_hook=OrderedDict)
except Exception:
self.contract = {}
else:
self.contract = {}
def create(self,
           expiration_date,
           metadata_category,
           title,
           description,
           currency_code,
           price,
           process_time,
           nsfw,
           est_delivery_domestic=None,
           est_delivery_international=None,
           shipping_origin=None,
           shipping_regions=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           images=None,
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None):
    """
    Build a new listing contract from the given fields and save it to disk.

    All parameters are strings except:
    :param expiration_date: `string` (must be formatted UTC datetime)
    :param keywords: `list`
    :param nsfw: `boolean`
    :param images: a `list` of image files
    :param free_shipping: `boolean`
    :param shipping_origin: a 'string' formatted `CountryCode`
    :param shipping_regions: a 'list' of 'string' formatted `CountryCode`s
    """
    # TODO: import keys into the contract, import moderator information from db, sign contract.
    profile = Profile().get()
    keychain = KeyChain()
    self.contract = OrderedDict(
        {
            "vendor_offer": {
                "listing": {
                    "metadata": {
                        "version": "0.1",
                        "expiry": expiration_date + " UTC",
                        "category": metadata_category,
                        "category_sub": "fixed price"
                    },
                    "id": {
                        "guid": keychain.guid.encode("hex"),
                        "pubkeys": {
                            "guid": keychain.guid_signed_pubkey[64:].encode("hex"),
                            "bitcoin": keychain.bitcoin_master_pubkey
                        }
                    },
                    "item": {
                        "title": title,
                        "description": description,
                        "process_time": process_time,
                        "price_per_unit": {},
                        "nsfw": nsfw
                    }
                }
            }
        }
    )
    listing = self.contract["vendor_offer"]["listing"]
    item = listing["item"]
    if metadata_category == "physical good" and condition is not None:
        item["condition"] = condition
    if currency_code.upper() == "BTC":
        item["price_per_unit"]["bitcoin"] = price
    else:
        # FIX: the "fiat" sub-dict was never initialized before assignment,
        # which raised KeyError for every non-BTC listing. (The later
        # revision of this file contains the same fix.)
        item["price_per_unit"]["fiat"] = {}
        item["price_per_unit"]["fiat"]["price"] = price
        item["price_per_unit"]["fiat"]["currency_code"] = currency_code
    if keywords is not None:
        item["keywords"] = []
        item["keywords"].extend(keywords)
    if category is not None:
        item["category"] = category
    if sku is not None:
        item["sku"] = sku
    if metadata_category == "physical good":
        listing["shipping"] = {}
        shipping = listing["shipping"]
        shipping["shipping_origin"] = shipping_origin
        if free_shipping is False:
            shipping["free"] = False
            shipping["flat_fee"] = {}
            if shipping_currency_code == "BTC":
                shipping["flat_fee"]["bitcoin"] = {}
                shipping["flat_fee"]["bitcoin"]["domestic"] = shipping_domestic
                shipping["flat_fee"]["bitcoin"]["international"] = shipping_international
            else:
                shipping["flat_fee"]["fiat"] = {}
                shipping["flat_fee"]["fiat"]["price"] = {}
                shipping["flat_fee"]["fiat"]["price"]["domestic"] = shipping_domestic
                shipping["flat_fee"]["fiat"]["price"]["international"] = shipping_international
                shipping["flat_fee"]["fiat"]["currency_code"] = shipping_currency_code
        else:
            # free_shipping of None (the default) is treated as free.
            shipping["free"] = True
        shipping["shipping_regions"] = []
        for region in shipping_regions:
            shipping["shipping_regions"].append(region)
        shipping["est_delivery"] = {}
        shipping["est_delivery"]["domestic"] = est_delivery_domestic
        shipping["est_delivery"]["international"] = est_delivery_international
    if profile.HasField("handle"):
        listing["id"]["blockchain_id"] = profile.handle
    if images is not None:
        item["image_hashes"] = []
        for image in images:
            hash_value = digest(image).encode("hex")
            item["image_hashes"].append(hash_value)
            # persist the raw image and index it by hash for later retrieval
            with open(DATA_FOLDER + "store/media/" + hash_value, 'w') as outfile:
                outfile.write(image)
            HashMap().insert(digest(image), DATA_FOLDER + "store/media/" + hash_value)
    self.save()
def update(self,
           expiration_date=None,
           metadata_category=None,
           title=None,
           description=None,
           currency_code=None,
           price=None,
           process_time=None,
           nsfw=None,
           est_delivery_domestic=None,
           est_delivery_international=None,
           shipping_origin=None,
           shipping_regions=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           image_hashes=None,  # if intending to delete an image, pass in
           # the hashes that are staying.
           images=None,  # to add new images pass in a list of image files.
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None):
    """
    Update an existing listing contract in place.

    Every parameter defaults to None; only fields whose argument is not
    None are touched. The old contract artifacts (file on disk, db listing
    metadata, HashMap pointer) are removed first via delete(False) — images
    are kept — and the updated contract is re-persisted via save() at the end.
    """
    # Remove the old on-disk/db records (keep images) before mutating.
    self.delete(False)
    vendor_listing = self.contract["vendor_offer"]["listing"]
    if expiration_date is not None:
        vendor_listing["item"]["expiry"] = expiration_date
    if metadata_category is not None:
        vendor_listing["metadata"]["category"] = metadata_category
        # NOTE(review): the category is assigned above BEFORE the two
        # comparisons below, so both branches compare metadata_category
        # against itself and look unreachable — the shipping section is
        # never added/removed on a category change. Verify intended order.
        if metadata_category != "physical good" and vendor_listing["metadata"][
                "category"] == "physical good":
            del vendor_listing["shipping"]
        elif metadata_category == "physical good" and vendor_listing["metadata"][
                "category"] != "physical good":
            vendor_listing["shipping"] = {}
            vendor_listing["shipping"]["est_delivery"] = {}
            vendor_listing["shipping"]["free"] = False
    if title is not None:
        vendor_listing["item"]["title"] = title
    if description is not None:
        vendor_listing["item"]["description"] = description
    if currency_code is not None:
        # Switching currency type migrates the existing price across.
        if currency_code.upper() != "BTC" and "bitcoin" \
                in vendor_listing["item"]["price_per_unit"]:
            p = vendor_listing["item"]["price_per_unit"]["bitcoin"]
            del vendor_listing["item"]["price_per_unit"]["bitcoin"]
            vendor_listing["item"]["price_per_unit"]["fiat"] = {}
            vendor_listing["item"]["price_per_unit"]["fiat"][
                "currency_code"] = currency_code
            vendor_listing["item"]["price_per_unit"]["fiat"]["price"] = p
        elif currency_code.upper() == "BTC" and "fiat" in \
                vendor_listing["item"]["price_per_unit"]:
            p = vendor_listing["item"]["price_per_unit"]["fiat"]["price"]
            del vendor_listing["item"]["price_per_unit"]["fiat"]
            vendor_listing["item"]["price_per_unit"]["bitcoin"] = p
    if price is not None:
        if "bitcoin" in vendor_listing["item"]["price_per_unit"]:
            vendor_listing["item"]["price_per_unit"]["bitcoin"] = price
        else:
            vendor_listing["item"]["price_per_unit"]["fiat"]["price"] = price
    if process_time is not None:
        vendor_listing["item"]["process_time"] = process_time
    if nsfw is not None:
        vendor_listing["item"]["nsfw"] = nsfw
    if keywords is not None:
        # Keywords are replaced wholesale, not merged.
        vendor_listing["item"]["keywords"] = []
        vendor_listing["item"]["keywords"].extend(keywords)
    if category is not None:
        vendor_listing["item"]["category"] = category
    if image_hashes is not None:
        # image_hashes is the list of hashes to KEEP; anything currently
        # on the listing but not in the list is deleted from disk.
        to_delete = list(set(vendor_listing["item"]["image_hashes"]) - set(image_hashes))
        for image_hash in to_delete:
            # delete from disk
            h = HashMap()
            image_path = h.get_file(unhexlify(image_hash))
            if os.path.exists(image_path):
                os.remove(image_path)
            # remove pointer to the image from the HashMap
            h.delete(unhexlify(image_hash))
        vendor_listing["item"]["image_hashes"] = []
        vendor_listing["item"]["image_hashes"].extend(image_hashes)
    if images is not None:
        # New images are appended, written to the media store, and indexed.
        if "image_hashes" not in vendor_listing["item"]:
            vendor_listing["item"]["image_hashes"] = []
        for image in images:
            hash_value = digest(image).encode("hex")
            vendor_listing["item"]["image_hashes"].append(hash_value)
            with open(DATA_FOLDER + "store/media/" + hash_value, 'w') as outfile:
                outfile.write(image)
            HashMap().insert(digest(image), DATA_FOLDER + "store/media/" + hash_value)
    if vendor_listing["metadata"]["category"] == "physical good" and condition is not None:
        vendor_listing["item"]["condition"] = condition
    if sku is not None:
        vendor_listing["item"]["sku"] = sku
    if vendor_listing["metadata"]["category"] == "physical good":
        if shipping_origin is not None:
            vendor_listing["shipping"]["shipping_origin"] = shipping_origin
        if free_shipping is not None:
            # Toggling free shipping adds/removes the flat_fee structure.
            if free_shipping is True and vendor_listing["shipping"]["free"] is False:
                vendor_listing["shipping"]["free"] = True
                del vendor_listing["shipping"]["flat_fee"]
            elif free_shipping is False and vendor_listing["shipping"]["free"] is True:
                vendor_listing["shipping"]["flat_fee"] = {}
                vendor_listing["shipping"]["flat_fee"]["bitcoin"] = {}
                vendor_listing["shipping"]["free"] = False
        if shipping_currency_code is not None and vendor_listing["shipping"]["free"] is False:
            # Switching the flat-fee currency migrates existing fee values.
            if shipping_currency_code == "BTC" and "bitcoin" not in \
                    vendor_listing["shipping"]["flat_fee"]:
                vendor_listing["shipping"]["flat_fee"]["bitcoin"] = {}
                d = vendor_listing["shipping"]["flat_fee"]["fiat"]["price"]["domestic"]
                i = vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                    "international"]
                vendor_listing["shipping"]["flat_fee"]["bitcoin"]["domestic"] = d
                vendor_listing["shipping"]["flat_fee"]["bitcoin"]["international"] = i
                del vendor_listing["shipping"]["flat_fee"]["fiat"]
            elif shipping_currency_code != "BTC" and "bitcoin" in \
                    vendor_listing["shipping"]["flat_fee"]:
                d = vendor_listing["shipping"]["flat_fee"]["bitcoin"]["domestic"]
                i = vendor_listing["shipping"]["flat_fee"]["bitcoin"]["international"]
                vendor_listing["shipping"]["flat_fee"]["fiat"] = {}
                vendor_listing["shipping"]["flat_fee"]["fiat"]["price"] = {}
                vendor_listing["shipping"]["flat_fee"]["fiat"]["price"]["domestic"] = d
                vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                    "international"] = i
                vendor_listing["shipping"]["flat_fee"]["fiat"][
                    "currency_code"] = shipping_currency_code
                del vendor_listing["shipping"]["flat_fee"]["bitcoin"]
        # NOTE(review): the four flat-fee updates below assume "flat_fee"
        # exists; with free shipping enabled they would raise KeyError if a
        # fee argument is passed — confirm callers never do that.
        if shipping_domestic is not None and "bitcoin" not in \
                vendor_listing["shipping"]["flat_fee"]:
            vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                "domestic"] = shipping_domestic
        if shipping_international is not None and "bitcoin" not in \
                vendor_listing["shipping"]["flat_fee"]:
            vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                "international"] = shipping_international
        if shipping_domestic is not None and "bitcoin" in \
                vendor_listing["shipping"]["flat_fee"]:
            vendor_listing["shipping"]["flat_fee"]["bitcoin"][
                "domestic"] = shipping_domestic
        if shipping_international is not None and "bitcoin" in \
                vendor_listing["shipping"]["flat_fee"]:
            vendor_listing["shipping"]["flat_fee"]["bitcoin"][
                "international"] = shipping_international
        if shipping_regions is not None:
            vendor_listing["shipping"]["shipping_regions"] = shipping_regions
        if est_delivery_domestic is not None:
            vendor_listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
        if est_delivery_international is not None:
            vendor_listing["shipping"]["est_delivery"][
                "international"] = est_delivery_international
    self.save()
def delete(self, delete_images=True):
    """
    Deletes the contract json from the OpenBazaar directory as well as the listing
    metadata from the db and all the related images in the file system.

    :param delete_images: when False, image files and their HashMap
        pointers are left untouched (used by update()).
    """
    # build the file_name from the contract title (same sanitizing as save())
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    h = HashMap()
    # maybe delete the images from disk
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"] and delete_images:
        for image_hash in self.contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
            # delete from disk
            image_path = h.get_file(unhexlify(image_hash))
            # FIX: get_file can return None (see __init__); guard it so a
            # missing HashMap entry no longer raises TypeError here.
            if image_path and os.path.exists(image_path):
                os.remove(image_path)
            # remove pointer to the image from the HashMap
            h.delete(unhexlify(image_hash))
    # delete the contract from disk
    if os.path.exists(file_path):
        os.remove(file_path)
    # delete the listing metadata from the db
    contract_hash = digest(json.dumps(self.contract, indent=4))
    ListingsStore().delete_listing(contract_hash)
    # remove the pointer to the contract from the HashMap
    h.delete(contract_hash)
def save(self):
    """
    Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
    It uses the title as the file name so it's easy on human eyes. A mapping of the
    hash of the contract and file path is stored in the database so we can retrieve
    the contract with only its hash.
    Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
    is saved in the db for fast access.
    """
    # get the contract title to use as the file name and format it
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    # save the json contract to the file system
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # Create a `ListingMetadata` protobuf object using data from the full contract
    listings = Listings()
    data = listings.ListingMetadata()
    # the hash is over the exact serialization written to disk above
    data.contract_hash = digest(json.dumps(self.contract, indent=4))
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    data.title = vendor_item["title"]
    if "image_hashes" in vendor_item:
        # first image doubles as the thumbnail
        data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
    # NOTE(review): create() only sets "category" when one was supplied, so
    # this line looks like it can raise KeyError for uncategorized
    # listings — confirm against callers.
    data.category = vendor_item["category"]
    if "bitcoin" not in vendor_item["price_per_unit"]:
        data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
        data.currency_code = vendor_item["price_per_unit"]["fiat"][
            "currency_code"]
    else:
        data.price = float(vendor_item["price_per_unit"]["bitcoin"])
        data.currency_code = "BTC"
    data.nsfw = vendor_item["nsfw"]
    if "shipping" not in self.contract["vendor_offer"]["listing"]:
        # digital goods have no shipping section; use the "NA" country code
        data.origin = CountryCode.Value("NA")
    else:
        data.origin = CountryCode.Value(
            self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
        for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
            data.ships_to.append(CountryCode.Value(region.upper()))
    # save the mapping of the contract file path and contract hash in the database
    HashMap().insert(data.contract_hash, file_path)
    # save the `ListingMetadata` protobuf to the database as well
    ListingsStore().add_listing(data)
| {
"repo_name": "bankonme/OpenBazaar-Server",
"path": "market/contracts.py",
"copies": "2",
"size": "20799",
"license": "mit",
"hash": 2402427947767430700,
"line_mean": 49.3607748184,
"line_max": 100,
"alpha_frac": 0.5393047743,
"autogenerated": false,
"ratio": 4.5853174603174605,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001465695224023398,
"num_lines": 413
} |
__author__ = 'chris'
import json
from binascii import unhexlify, hexlify
from collections import OrderedDict
import re
import os
import nacl.encoding
from protos.objects import Listings
from protos.countries import CountryCode
from dht.utils import digest
from constants import DATA_FOLDER
from db.datastore import HashMap, ListingsStore
from market.profile import Profile
from keyutils.keys import KeyChain
class Contract(object):
"""
A class for creating and interacting with OpenBazaar Ricardian contracts.
"""
def __init__(self, contract=None, hash_value=None):
"""
This class can be instantiated with either an `OrderedDict` or a hash
of a contract. If a hash is used, we will load the contract from either
the file system or cache.
Alternatively, pass in no parameters if the intent is to create a new
contract.
Args:
contract: an `OrderedDict` containing a filled out json contract
hash: a hash (in raw bytes) of a contract
"""
if contract is not None:
self.contract = contract
elif hash_value is not None:
try:
file_path = HashMap().get_file(hash_value)
if file_path is None:
file_path = DATA_FOLDER + "cache/" + hexlify(hash_value)
with open(file_path, 'r') as filename:
self.contract = json.load(filename, object_pairs_hook=OrderedDict)
except Exception:
self.contract = {}
else:
self.contract = {}
def create(self,
           expiration_date,
           metadata_category,
           title,
           description,
           currency_code,
           price,
           process_time,
           nsfw,
           shipping_origin,
           shipping_regions,
           est_delivery_domestic=None,
           est_delivery_international=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           images=None,
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None):
    """
    Build a new listing contract from the given fields, sign the listing
    with our guid key, and save it to disk.

    All parameters are strings except:
    :param expiration_date: `string` (must be formatted UTC datetime)
    :param keywords: `list`
    :param nsfw: `boolean`
    :param images: a `list` of image files
    :param free_shipping: `boolean`
    :param shipping_origin: a 'string' formatted `CountryCode`
    :param shipping_regions: a 'list' of 'string' formatted `CountryCode`s
    """
    # TODO: import keys into the contract, import moderator information from db, sign contract.
    profile = Profile().get()
    keychain = KeyChain()
    self.contract = OrderedDict(
        {
            "vendor_offer": {
                "listing": {
                    "metadata": {
                        "version": "0.1",
                        "expiry": expiration_date + " UTC",
                        "category": metadata_category,
                        "category_sub": "fixed price"
                    },
                    "id": {
                        "guid": keychain.guid.encode("hex"),
                        "pubkeys": {
                            # strip the 64-byte signature prefix, keep the raw pubkey
                            "guid": keychain.guid_signed_pubkey[64:].encode("hex"),
                            "bitcoin": keychain.bitcoin_master_pubkey
                        }
                    },
                    "item": {
                        "title": title,
                        "description": description,
                        "process_time": process_time,
                        "price_per_unit": {},
                        "nsfw": nsfw
                    }
                }
            }
        }
    )
    # "condition" only applies to physical goods
    if metadata_category == "physical good" and condition is not None:
        self.contract["vendor_offer"]["listing"]["item"]["condition"] = condition
    if currency_code.upper() == "BTC":
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["bitcoin"] = price
    else:
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["fiat"] = {}
        item["price_per_unit"]["fiat"]["price"] = price
        item["price_per_unit"]["fiat"]["currency_code"] = currency_code
    if keywords is not None:
        self.contract["vendor_offer"]["listing"]["item"]["keywords"] = []
        self.contract["vendor_offer"]["listing"]["item"]["keywords"].extend(keywords)
    if category is not None:
        self.contract["vendor_offer"]["listing"]["item"]["category"] = category
    if sku is not None:
        self.contract["vendor_offer"]["listing"]["item"]["sku"] = sku
    if metadata_category == "physical good":
        self.contract["vendor_offer"]["listing"]["shipping"] = {}
        shipping = self.contract["vendor_offer"]["listing"]["shipping"]
        shipping["shipping_origin"] = shipping_origin
        if free_shipping is False:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = False
            self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"] = {}
            if shipping_currency_code == "BTC":
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"] = {}
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "domestic"] = shipping_domestic
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "international"] = shipping_international
            else:
                shipping = self.contract["vendor_offer"]["listing"]["shipping"]
                shipping["flat_fee"]["fiat"] = {}
                shipping["flat_fee"]["fiat"]["price"] = {}
                shipping["flat_fee"]["fiat"]["price"][
                    "domestic"] = shipping_domestic
                shipping["flat_fee"]["fiat"]["price"][
                    "international"] = shipping_international
                shipping["flat_fee"]["fiat"][
                    "currency_code"] = shipping_currency_code
        else:
            # free_shipping of None (the default) is treated as free
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = True
        self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"] = []
        for region in shipping_regions:
            shipping = self.contract["vendor_offer"]["listing"]["shipping"]
            shipping["shipping_regions"].append(region)
        listing = self.contract["vendor_offer"]["listing"]
        listing["shipping"]["est_delivery"] = {}
        listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
        listing["shipping"]["est_delivery"][
            "international"] = est_delivery_international
    if profile.HasField("handle"):
        self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] = profile.handle
    if images is not None:
        self.contract["vendor_offer"]["listing"]["item"]["image_hashes"] = []
        for image in images:
            hash_value = digest(image).encode("hex")
            self.contract["vendor_offer"]["listing"]["item"]["image_hashes"].append(hash_value)
            # persist the raw image bytes and index them by hash
            with open(DATA_FOLDER + "store/media/" + hash_value, 'w') as outfile:
                outfile.write(image)
            HashMap().insert(digest(image), DATA_FOLDER + "store/media/" + hash_value)
    # Sign the exact JSON serialization of the finished listing with our
    # guid key; [:128] keeps the hex-encoded signature, dropping the
    # appended message.
    listing = json.dumps(self.contract["vendor_offer"]["listing"], indent=4)
    self.contract["vendor_offer"]["signature"] = \
        keychain.signing_key.sign(listing, encoder=nacl.encoding.HexEncoder)[:128]
    self.save()
    def update(self,
               expiration_date=None,
               metadata_category=None,
               title=None,
               description=None,
               currency_code=None,
               price=None,
               process_time=None,
               nsfw=None,
               est_delivery_domestic=None,
               est_delivery_international=None,
               shipping_origin=None,
               shipping_regions=None,
               keywords=None,
               category=None,
               condition=None,
               sku=None,
               image_hashes=None,  # if intending to delete an image, pass in
               # the hashes that are staying.
               images=None,  # to add new images pass in a list of image files.
               free_shipping=None,
               shipping_currency_code=None,
               shipping_domestic=None,
               shipping_international=None):
        """
        Update fields of the existing contract in place.

        Every parameter defaults to None; only non-None values are applied.
        The old contract file and db listing row are removed first via
        `delete(False)` (images are kept) and the modified contract is
        re-persisted by `save()` at the end, so the stored hash/file name
        track any title change.
        """
        # remove the stale file + db row (keep images on disk)
        self.delete(False)
        vendor_listing = self.contract["vendor_offer"]["listing"]
        if expiration_date is not None:
            vendor_listing["item"]["expiry"] = expiration_date
        if metadata_category is not None:
            vendor_listing["metadata"]["category"] = metadata_category
            # NOTE(review): both transition checks below read "category"
            # AFTER it was just overwritten above, so each condition looks
            # unsatisfiable (dead code) — confirm the intended behavior was
            # to compare against the *previous* category.
            if metadata_category != "physical good" and vendor_listing["metadata"][
                    "category"] == "physical good":
                del vendor_listing["shipping"]
            elif metadata_category == "physical good" and vendor_listing["metadata"][
                    "category"] != "physical good":
                # switching to a physical good: create an empty shipping section
                # NOTE(review): "shipping_regions" is not initialized here but
                # save() iterates it — verify a caller always supplies regions.
                vendor_listing["shipping"] = {}
                vendor_listing["shipping"]["est_delivery"] = {}
                vendor_listing["shipping"]["free"] = False
        if title is not None:
            vendor_listing["item"]["title"] = title
        if description is not None:
            vendor_listing["item"]["description"] = description
        if currency_code is not None:
            # migrate the price between the "bitcoin" and "fiat" sub-dicts
            if currency_code.upper() != "BTC" and "bitcoin" \
                    in vendor_listing["item"]["price_per_unit"]:
                p = vendor_listing["item"]["price_per_unit"]["bitcoin"]
                del vendor_listing["item"]["price_per_unit"]["bitcoin"]
                vendor_listing["item"]["price_per_unit"]["fiat"] = {}
                vendor_listing["item"]["price_per_unit"]["fiat"][
                    "currency_code"] = currency_code
                vendor_listing["item"]["price_per_unit"]["fiat"]["price"] = p
            elif currency_code.upper() == "BTC" and "fiat" in \
                    vendor_listing["item"]["price_per_unit"]:
                p = vendor_listing["item"]["price_per_unit"]["fiat"]["price"]
                del vendor_listing["item"]["price_per_unit"]["fiat"]
                vendor_listing["item"]["price_per_unit"]["bitcoin"] = p
            # NOTE(review): a fiat -> different fiat currency change (no
            # "bitcoin" key present) is not handled here — confirm.
        if price is not None:
            if "bitcoin" in vendor_listing["item"]["price_per_unit"]:
                vendor_listing["item"]["price_per_unit"]["bitcoin"] = price
            else:
                vendor_listing["item"]["price_per_unit"]["fiat"]["price"] = price
        if process_time is not None:
            vendor_listing["item"]["process_time"] = process_time
        if nsfw is not None:
            vendor_listing["item"]["nsfw"] = nsfw
        if keywords is not None:
            # full replacement, not a merge
            vendor_listing["item"]["keywords"] = []
            vendor_listing["item"]["keywords"].extend(keywords)
        if category is not None:
            vendor_listing["item"]["category"] = category
        if image_hashes is not None:
            # image_hashes is the set of hashes to KEEP; anything currently
            # referenced but absent from it is removed from disk and HashMap.
            to_delete = list(set(vendor_listing["item"]["image_hashes"]) - set(image_hashes))
            for image_hash in to_delete:
                # delete from disk
                h = HashMap()
                image_path = h.get_file(unhexlify(image_hash))
                if os.path.exists(image_path):
                    os.remove(image_path)
                # remove pointer to the image from the HashMap
                h.delete(unhexlify(image_hash))
            vendor_listing["item"]["image_hashes"] = []
            vendor_listing["item"]["image_hashes"].extend(image_hashes)
        if images is not None:
            # new image files: write to the media store and register in HashMap
            if "image_hashes" not in vendor_listing["item"]:
                vendor_listing["item"]["image_hashes"] = []
            for image in images:
                hash_value = digest(image).encode("hex")
                vendor_listing["item"]["image_hashes"].append(hash_value)
                with open(DATA_FOLDER + "store/media/" + hash_value, 'w') as outfile:
                    outfile.write(image)
                HashMap().insert(digest(image), DATA_FOLDER + "store/media/" + hash_value)
        if vendor_listing["metadata"]["category"] == "physical good" and condition is not None:
            vendor_listing["item"]["condition"] = condition
        if sku is not None:
            vendor_listing["item"]["sku"] = sku
        # shipping fields only apply to physical goods
        if vendor_listing["metadata"]["category"] == "physical good":
            if shipping_origin is not None:
                vendor_listing["shipping"]["shipping_origin"] = shipping_origin
            if free_shipping is not None:
                if free_shipping is True and vendor_listing["shipping"]["free"] is False:
                    vendor_listing["shipping"]["free"] = True
                    del vendor_listing["shipping"]["flat_fee"]
                elif free_shipping is False and vendor_listing["shipping"]["free"] is True:
                    # re-enable flat-fee shipping; prices must be supplied below
                    vendor_listing["shipping"]["flat_fee"] = {}
                    vendor_listing["shipping"]["flat_fee"]["bitcoin"] = {}
                    vendor_listing["shipping"]["free"] = False
            if shipping_currency_code is not None and vendor_listing["shipping"]["free"] is False:
                # migrate flat fees between the "bitcoin" and "fiat" sub-dicts
                if shipping_currency_code == "BTC" and "bitcoin" not in \
                        vendor_listing["shipping"]["flat_fee"]:
                    vendor_listing["shipping"]["flat_fee"]["bitcoin"] = {}
                    d = vendor_listing["shipping"]["flat_fee"]["fiat"]["price"]["domestic"]
                    i = vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                        "international"]
                    vendor_listing["shipping"]["flat_fee"]["bitcoin"]["domestic"] = d
                    vendor_listing["shipping"]["flat_fee"]["bitcoin"]["international"] = i
                    del vendor_listing["shipping"]["flat_fee"]["fiat"]
                elif shipping_currency_code != "BTC" and "bitcoin" in \
                        vendor_listing["shipping"]["flat_fee"]:
                    d = vendor_listing["shipping"]["flat_fee"]["bitcoin"]["domestic"]
                    i = vendor_listing["shipping"]["flat_fee"]["bitcoin"]["international"]
                    vendor_listing["shipping"]["flat_fee"]["fiat"] = {}
                    vendor_listing["shipping"]["flat_fee"]["fiat"]["price"] = {}
                    vendor_listing["shipping"]["flat_fee"]["fiat"]["price"]["domestic"] = d
                    vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                        "international"] = i
                    vendor_listing["shipping"]["flat_fee"]["fiat"][
                        "currency_code"] = shipping_currency_code
                    del vendor_listing["shipping"]["flat_fee"]["bitcoin"]
            # NOTE(review): the four price setters below access "flat_fee"
            # unconditionally — if shipping is currently free there is no
            # "flat_fee" key and this raises KeyError; confirm callers guard.
            if shipping_domestic is not None and "bitcoin" not in \
                    vendor_listing["shipping"]["flat_fee"]:
                vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                    "domestic"] = shipping_domestic
            if shipping_international is not None and "bitcoin" not in \
                    vendor_listing["shipping"]["flat_fee"]:
                vendor_listing["shipping"]["flat_fee"]["fiat"]["price"][
                    "international"] = shipping_international
            if shipping_domestic is not None and "bitcoin" in \
                    vendor_listing["shipping"]["flat_fee"]:
                vendor_listing["shipping"]["flat_fee"]["bitcoin"][
                    "domestic"] = shipping_domestic
            if shipping_international is not None and "bitcoin" in \
                    vendor_listing["shipping"]["flat_fee"]:
                vendor_listing["shipping"]["flat_fee"]["bitcoin"][
                    "international"] = shipping_international
            if shipping_regions is not None:
                vendor_listing["shipping"]["shipping_regions"] = shipping_regions
            if est_delivery_domestic is not None:
                vendor_listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
            if est_delivery_international is not None:
                vendor_listing["shipping"]["est_delivery"][
                    "international"] = est_delivery_international
        # re-persist under the (possibly changed) title and hash
        self.save()
def get_contract_id(self):
contract = json.dumps(self.contract, indent=4)
return digest(contract)
    def delete(self, delete_images=True):
        """
        Deletes the contract json from the OpenBazaar directory as well as the listing
        metadata from the db and all the related images in the file system.

        Args:
            delete_images: when False the image files and their HashMap
                pointers are kept (used by `update()` which re-saves them).
        """
        # build the file_name from the contract
        # (same sanitization as save(): strip punctuation, spaces -> underscores)
        file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
        file_name = re.sub(r"[^\w\s]", '', file_name)
        file_name = re.sub(r"\s+", '_', file_name)
        file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
        h = HashMap()
        # maybe delete the images from disk
        if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"] and delete_images:
            for image_hash in self.contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                # delete from disk
                image_path = h.get_file(unhexlify(image_hash))
                if os.path.exists(image_path):
                    os.remove(image_path)
                # remove pointer to the image from the HashMap
                h.delete(unhexlify(image_hash))
        # delete the contract from disk
        if os.path.exists(file_path):
            os.remove(file_path)
        # delete the listing metadata from the db
        # NOTE(review): the hash is recomputed from the current in-memory
        # contract; this only matches the stored row if the contract has not
        # been modified since save() — confirm.
        contract_hash = digest(json.dumps(self.contract, indent=4))
        ListingsStore().delete_listing(contract_hash)
        # remove the pointer to the contract from the HashMap
        h.delete(contract_hash)
    def save(self):
        """
        Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
        It uses the title as the file name so it's easy on human eyes. A mapping of the
        hash of the contract and file path is stored in the database so we can retrieve
        the contract with only its hash.
        Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
        is saved in the db for fast access.
        """
        # get the contract title to use as the file name and format it
        file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
        file_name = re.sub(r"[^\w\s]", '', file_name)
        file_name = re.sub(r"\s+", '_', file_name)
        # save the json contract to the file system
        file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
        # Create a `ListingMetadata` protobuf object using data from the full contract
        listings = Listings()
        data = listings.ListingMetadata()
        data.contract_hash = digest(json.dumps(self.contract, indent=4))
        vendor_item = self.contract["vendor_offer"]["listing"]["item"]
        data.title = vendor_item["title"]
        if "image_hashes" in vendor_item:
            # first image doubles as the listing thumbnail
            data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
        data.category = vendor_item["category"]
        # price is stored either under "bitcoin" or under "fiat"/"price"
        if "bitcoin" not in vendor_item["price_per_unit"]:
            data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
            data.currency_code = vendor_item["price_per_unit"]["fiat"][
                "currency_code"]
        else:
            data.price = float(vendor_item["price_per_unit"]["bitcoin"])
            data.currency_code = "BTC"
        data.nsfw = vendor_item["nsfw"]
        # non-physical goods have no shipping section; flag origin as "NA"
        if "shipping" not in self.contract["vendor_offer"]["listing"]:
            data.origin = CountryCode.Value("NA")
        else:
            data.origin = CountryCode.Value(
                self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
            # NOTE(review): raises KeyError if "shipping_regions" was never set
            for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
                data.ships_to.append(CountryCode.Value(region.upper()))
        # save the mapping of the contract file path and contract hash in the database
        HashMap().insert(data.contract_hash, file_path)
        # save the `ListingMetadata` protobuf to the database as well
        ListingsStore().add_listing(data)
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "market/contracts.py",
"copies": "2",
"size": "21201",
"license": "mit",
"hash": 4553071492925365000,
"line_mean": 49.1205673759,
"line_max": 100,
"alpha_frac": 0.5409650488,
"autogenerated": false,
"ratio": 4.569181034482758,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6110146083282758,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
from collections import OrderedDict
import nacl.signing
import nacl.utils
import nacl.encoding
import nacl.hash
from nacl.public import PrivateKey, PublicKey, Box
from zope.interface import implements
from zope.interface.verify import verifyObject
from zope.interface.exceptions import DoesNotImplement
from net.rpcudp import RPCProtocol
from interfaces import MessageProcessor
from log import Logger
from protos.message import GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, \
GET_USER_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING, BROADCAST, \
GET_CONTRACT_METADATA, MESSAGE, ORDER, ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN, \
DISPUTE_CLOSE
from market.contracts import Contract
from market.profile import Profile
from protos.objects import Metadata, Listings, Followers, Plaintext_Message
from interfaces import BroadcastListener, MessageListener, NotificationListener
from keyutils.bip32utils import derive_childkey
class MarketProtocol(RPCProtocol):
implements(MessageProcessor)
    def __init__(self, node, router, signing_key, database):
        """
        Args:
            node: this node's own DHT node object (its `id` is checked in rpc_follow)
            router: the routing table; contacted peers are added to it
            signing_key: nacl signing key used to sign every response payload
            database: datastore accessor (HashMap, FollowData, ListingsStore, ...)
        """
        self.router = router
        self.node = node
        RPCProtocol.__init__(self, node, router)
        self.log = Logger(system=self)
        self.multiplexer = None  # wired up later via connect_multiplexer()
        self.db = database
        self.signing_key = signing_key
        self.listeners = []  # notification/message/broadcast listeners
        # message types this processor claims from the multiplexer
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 BROADCAST, MESSAGE, ORDER, ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN,
                                 DISPUTE_CLOSE]
    def connect_multiplexer(self, multiplexer):
        # attach the OpenBazaarProtocol multiplexer after construction
        self.multiplexer = multiplexer
    def add_listener(self, listener):
        # register an object implementing one of the *Listener interfaces
        self.listeners.append(listener)
    def rpc_get_contract(self, sender, contract_hash):
        """Serve the raw contract json for `contract_hash`; None if not found."""
        self.log.info("serving contract %s to %s" % (contract_hash.encode('hex'), sender))
        self.router.addContact(sender)
        try:
            # HashMap maps hex-encoded hashes to on-disk file paths
            with open(self.db.HashMap().get_file(contract_hash.encode("hex")), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("could not find contract %s" % contract_hash.encode('hex'))
            return None
    def rpc_get_image(self, sender, image_hash):
        """Serve the image bytes for a 20-byte `image_hash`; None if not found."""
        self.router.addContact(sender)
        try:
            # reject malformed hashes before touching the filesystem
            if len(image_hash) != 20:
                raise Exception("Invalid image hash")
            self.log.info("serving image %s to %s" % (image_hash.encode('hex'), sender))
            with open(self.db.HashMap().get_file(image_hash.encode("hex")), "rb") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("could not find image %s" % image_hash[:20].encode('hex'))
            return None
    def rpc_get_profile(self, sender):
        """Serve our serialized profile plus a 64-byte signature over it."""
        self.log.info("serving profile to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("unable to load the profile")
            return None
    def rpc_get_user_metadata(self, sender):
        """Serve a trimmed, signed Metadata protobuf built from the profile."""
        self.log.info("serving user metadata to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            # copy only the lightweight fields into the Metadata message
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("unable to load profile metadata")
            return None
    def rpc_get_listings(self, sender):
        """Serve the signed Listings protobuf, annotated with handle/avatar."""
        self.log.info("serving store listings to %s" % sender)
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("could not find any listings in the database")
            return None
    def rpc_get_contract_metadata(self, sender, contract_hash):
        """Serve the signed ListingMetadata entry matching `contract_hash`.

        Falls through (implicit None) when no listing matches; returns None
        on a parse/db error.
        """
        self.log.info("serving metadata for contract %s to %s" % (contract_hash.encode("hex"), sender))
        self.router.addContact(sender)
        try:
            proto = self.db.ListingsStore().get_proto()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    ser = listing.SerializeToString()
                    return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("could not find metadata for contract %s" % contract_hash.encode("hex"))
            return None
    def rpc_follow(self, sender, proto, signature):
        """Validate and store a follow request; reply with our signed metadata.

        The Follower protobuf must be signed by the sender, claim the
        sender's own guid, and target this node's id.
        """
        self.log.info("received follow request from %s" % sender)
        self.router.addContact(sender)
        try:
            # verify key bytes follow the signature in signed_pubkey
            # (assumed layout: 64-byte sig + 32-byte key — TODO confirm)
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.node.id:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.FollowData().set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            # surface a "follow" notification to any registered listener
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, f.metadata.handle, "follow", "", "", f.metadata.avatar_hash)
                except DoesNotImplement:
                    pass
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("failed to validate follower")
            return ["False"]
    def rpc_unfollow(self, sender, signature):
        """Remove the sender from our followers if the signature checks out."""
        self.log.info("received unfollow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            # the signed payload is the literal string "unfollow:<our id>"
            verify_key.verify("unfollow:" + self.node.id, signature)
            f = self.db.FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("failed to validate signature on unfollow request")
            return ["False"]
def rpc_get_followers(self, sender):
self.log.info("serving followers list to %s" % sender)
self.router.addContact(sender)
ser = self.db.FollowData().get_followers()
if ser is None:
return None
else:
return [ser, self.signing_key.sign(ser)[:64]]
def rpc_get_following(self, sender):
self.log.info("serving following list to %s" % sender)
self.router.addContact(sender)
ser = self.db.FollowData().get_following()
if ser is None:
return None
else:
return [ser, self.signing_key.sign(ser)[:64]]
    def rpc_broadcast(self, sender, message, signature):
        """Accept a short (<=140 char) signed broadcast from a node we follow."""
        if len(message) <= 140 and self.db.FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                self.log.warning("received invalid broadcast from %s" % sender)
                return ["False"]
            self.log.info("received a broadcast from %s" % sender)
            self.router.addContact(sender)
            # fan the message out to every BroadcastListener
            for listener in self.listeners:
                try:
                    verifyObject(BroadcastListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]
    def rpc_message(self, sender, pubkey, encrypted):
        """Decrypt, verify, and dispatch a direct Plaintext_Message.

        The payload is NaCl box-encrypted to us with an ephemeral `pubkey`.
        After decrypting we check the inner signature and that the sender
        guid is consistent with the proof-of-work hash of its pubkey.
        """
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = Plaintext_Message()
            p.ParseFromString(plaintext)
            # signature covers the message with its signature field cleared
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            # guid must be derived from the pubkey hash and match the sender
            if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.warning("received invalid message from %s" % sender)
            return ["False"]
    def rpc_order(self, sender, pubkey, encrypted):
        """Decrypt and validate a purchase order; reply with our signature.

        On success the contract begins awaiting funding on the blockchain
        and we return a signature binding address, amount, listing hash and
        the buyer's derived bitcoin key.
        """
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            # OrderedDict preserves key order so hashing stays deterministic
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            if c.verify(sender.signed_pubkey[64:]):
                self.router.addContact(sender)
                self.log.info("received an order from %s, waiting for payment..." % sender)
                payment_address = c.contract["buyer_order"]["order"]["payment"]["address"]
                chaincode = c.contract["buyer_order"]["order"]["payment"]["chaincode"]
                masterkey_b = c.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
                buyer_key = derive_childkey(masterkey_b, chaincode)
                amount = c.contract["buyer_order"]["order"]["payment"]["amount"]
                listing_hash = c.contract["buyer_order"]["order"]["ref_hash"]
                signature = self.signing_key.sign(
                    str(payment_address) + str(amount) + str(listing_hash) + str(buyer_key))[:64]
                c.await_funding(self.get_notification_listener(), self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.warning("received invalid order from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order from %s" % sender)
            return ["False"]
    def rpc_order_confirmation(self, sender, pubkey, encrypted):
        """Decrypt and accept a vendor's order confirmation."""
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            # accept_order_confirmation returns a falsy value on failure
            contract_id = c.accept_order_confirmation(self.get_notification_listener())
            if contract_id:
                self.router.addContact(sender)
                self.log.info("received confirmation for order %s" % contract_id)
                return ["True"]
            else:
                self.log.warning("received invalid order confirmation from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order confirmation from %s" % sender)
            return ["False"]
    def rpc_complete_order(self, sender, pubkey, encrypted):
        """Decrypt and accept a buyer's receipt closing out an order."""
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            contract_id = c.accept_receipt(self.get_notification_listener(), self.multiplexer.blockchain)
            self.router.addContact(sender)
            self.log.info("received receipt for order %s" % contract_id)
            return ["True"]
        except Exception:
            self.log.error("unable to parse receipt from %s" % sender)
            return ["False"]
def callGetContract(self, nodeToAsk, contract_hash):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_contract(address, contract_hash)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetImage(self, nodeToAsk, image_hash):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_image(address, image_hash)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetProfile(self, nodeToAsk):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_profile(address)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetUserMetadata(self, nodeToAsk):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_user_metadata(address)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetListings(self, nodeToAsk):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_listings(address)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetContractMetadata(self, nodeToAsk, contract_hash):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_contract_metadata(address, contract_hash)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callFollow(self, nodeToAsk, proto, signature):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.follow(address, proto, signature)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callUnfollow(self, nodeToAsk, signature):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.unfollow(address, signature)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetFollowers(self, nodeToAsk):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_followers(address)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callGetFollowing(self, nodeToAsk):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.get_following(address)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callBroadcast(self, nodeToAsk, message, signature):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.broadcast(address, message, signature)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callMessage(self, nodeToAsk, ehemeral_pubkey, ciphertext):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.message(address, ehemeral_pubkey, ciphertext)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.order(address, ephem_pubkey, encrypted_contract)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callOrderConfirmation(self, nodeToAsk, ephem_pubkey, encrypted_contract):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.order_confirmation(address, ephem_pubkey, encrypted_contract)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def callCompleteOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
address = (nodeToAsk.ip, nodeToAsk.port)
d = self.complete_order(address, ephem_pubkey, encrypted_contract)
return d.addCallback(self.handleCallResponse, nodeToAsk)
def handleCallResponse(self, result, node):
"""
If we get a response, add the node to the routing table. If
we get no response, make sure it's removed from the routing table.
"""
if result[0]:
self.router.addContact(node)
else:
self.log.debug("no response from %s, removing from router" % node)
self.router.removeContact(node)
return result
def get_notification_listener(self):
for listener in self.listeners:
try:
verifyObject(NotificationListener, listener)
return listener
except DoesNotImplement:
pass
    def __iter__(self):
        # iterating the processor yields the message types it handles
        return iter(self.handled_commands)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "market/protocol.py",
"copies": "1",
"size": "17434",
"license": "mit",
"hash": 7387128829603439000,
"line_mean": 43.1367088608,
"line_max": 114,
"alpha_frac": 0.6135711827,
"autogenerated": false,
"ratio": 4.084817244611059,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5198388427311058,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
from twisted.internet.protocol import Protocol, Factory, connectionDone
from twisted.internet.task import LoopingCall
# pylint: disable=W0232
class HeartbeatProtocol(Protocol):
    """
    Handles new incoming requests coming from a websocket.
    """
    def connectionLost(self, reason=connectionDone):
        # stop receiving heartbeat pushes once the socket goes away
        self.factory.unregister(self)
    def connectionMade(self):
        # register so the factory pushes status updates to this client
        self.factory.register(self)
    def dataReceived(self, payload):
        # heartbeat is push-only; inbound data is ignored
        return
class HeartbeatFactory(Factory):
    """
    Pushes a periodic JSON status message ("heartbeat") to every connected
    client, restricted to an IP allow-list.
    """
    def __init__(self, only_ip=None):
        # PEP 8: comparisons to None must use `is`, not `==`
        if only_ip is None:
            only_ip = ["127.0.0.1"]
        self.only_ip = only_ip
        self.status = "starting up"
        self.protocol = HeartbeatProtocol
        self.libbitcoin = None  # set externally once the blockchain client exists
        self.clients = []
        # broadcast the current status every 10 seconds, starting immediately
        LoopingCall(self._heartbeat).start(10, now=True)
    def buildProtocol(self, addr):
        # while booting, only allow connections when restricted to localhost
        if self.status in ("starting up", "generating GUID") and self.only_ip != ["127.0.0.1"]:
            return
        # reject addresses outside the allow-list ("0.0.0.0" means accept all)
        if addr.host not in self.only_ip and "0.0.0.0" not in self.only_ip:
            return
        return Factory.buildProtocol(self, addr)
    def set_status(self, status):
        # update the status string sent with each heartbeat
        self.status = status
    def register(self, client):
        if client not in self.clients:
            self.clients.append(client)
        # immediately send the new client a heartbeat
        self._heartbeat()
    def unregister(self, client):
        if client in self.clients:
            self.clients.remove(client)
    def push(self, msg):
        # write msg to every connected client
        for c in self.clients:
            c.transport.write(msg)
    def _heartbeat(self):
        # report libbitcoin connectivity alongside the server status
        if self.libbitcoin is not None:
            libbitcoin_status = "online" if self.libbitcoin.connected else "offline"
        else:
            libbitcoin_status = "NA"
        self.push(json.dumps({
            "status": self.status,
            "libbitcoin": libbitcoin_status
        }))
| {
"repo_name": "saltduck/OpenBazaar-Server",
"path": "net/heartbeat.py",
"copies": "6",
"size": "1889",
"license": "mit",
"hash": -1577614746074175200,
"line_mean": 27.1940298507,
"line_max": 95,
"alpha_frac": 0.6103758602,
"autogenerated": false,
"ratio": 3.9518828451882846,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008641143024373928,
"num_lines": 67
} |
__author__ = 'chris'
import json
import nacl.signing
import nacl.utils
import nacl.encoding
import nacl.hash
from nacl.public import PrivateKey, PublicKey, Box
from zope.interface import implements
from rpcudp import RPCProtocol
from interfaces import MessageProcessor
from log import Logger
from protos.message import GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, \
GET_USER_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING, NOTIFY, \
GET_CONTRACT_METADATA, MESSAGE, ORDER, ORDER_CONFIRMATION
from market.contracts import Contract
from market.profile import Profile
from protos.objects import Metadata, Listings, Followers, Plaintext_Message
from binascii import hexlify
from zope.interface.verify import verifyObject
from zope.interface.exceptions import DoesNotImplement
from interfaces import NotificationListener, MessageListener
from collections import OrderedDict
class MarketProtocol(RPCProtocol):
implements(MessageProcessor)
    def __init__(self, node_proto, router, signing_key, database):
        """
        Args:
            node_proto: our own node protobuf (exposed as self.proto by RPCProtocol)
            router: the routing table; contacted peers are added to it
            signing_key: nacl signing key used to sign responses
            database: datastore accessor (HashMap, FollowData, ListingsStore, ...)
        """
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.multiplexer = None  # wired up later via connect_multiplexer()
        self.db = database
        self.signing_key = signing_key
        self.listeners = []
        # message types this processor claims from the multiplexer
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 NOTIFY, MESSAGE, ORDER, ORDER_CONFIRMATION]
    def connect_multiplexer(self, multiplexer):
        # attach the OpenBazaarProtocol multiplexer after construction
        self.multiplexer = multiplexer
    def add_listener(self, listener):
        # register an object implementing one of the *Listener interfaces
        self.listeners.append(listener)
    def rpc_get_contract(self, sender, contract_hash):
        """Serve the raw contract json for `contract_hash`; ["None"] if absent."""
        self.log.info("Looking up contract ID %s" % contract_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            # NOTE(review): here get_file is keyed by the raw hash (the other
            # protocol version keys by hex) — confirm which the db expects.
            with open(self.db.HashMap().get_file(contract_hash), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("Could not find contract %s" % contract_hash.encode('hex'))
            return ["None"]
    def rpc_get_image(self, sender, image_hash):
        """Serve the image bytes for `image_hash`; ["None"] if not found."""
        self.log.info("Looking up image with hash %s" % image_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.db.HashMap().get_file(image_hash), "r") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("Could not find image %s" % image_hash.encode('hex'))
            return ["None"]
    def rpc_get_profile(self, sender):
        """Serve our serialized profile plus a 64-byte signature over it."""
        self.log.info("Fetching profile")
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("Unable to load the profile")
            return ["None"]
    def rpc_get_user_metadata(self, sender):
        """Serve a trimmed, signed Metadata protobuf built from the profile."""
        self.log.info("Fetching metadata")
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            # copy only the lightweight fields into the Metadata message
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("Unable to get the profile metadata")
            return ["None"]
    def rpc_get_listings(self, sender):
        """Serve the signed Listings protobuf, annotated with handle/avatar."""
        self.log.info("Fetching listings")
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Could not find any listings in the database")
            return ["None"]
    def rpc_get_contract_metadata(self, sender, contract_hash):
        """Serve the signed ListingMetadata entry matching `contract_hash`.

        Returns ["None"] on a parse/db error; NOTE(review): falls through to
        an implicit None (not ["None"]) when no listing matches — confirm.
        """
        self.log.info("Fetching metadata for contract %s" % hexlify(contract_hash))
        self.router.addContact(sender)
        try:
            proto = self.db.ListingsStore().get_proto()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    ser = listing.SerializeToString()
                    return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("Could not find metadata for contract %s" % hexlify(contract_hash))
            return ["None"]
    def rpc_follow(self, sender, proto, signature):
        """Validate and store a follow request; reply with our signed metadata."""
        self.log.info("Follow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            # verify key bytes follow the signature in signed_pubkey
            # (assumed layout: 64-byte sig + 32-byte key — TODO confirm)
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.proto.guid:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.FollowData().set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Failed to validate follower")
            return ["False"]
    def rpc_unfollow(self, sender, signature):
        """Remove the sender from our followers if the signature checks out."""
        self.log.info("Unfollow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            # the signed payload is the literal string "unfollow:<our guid>"
            verify_key.verify("unfollow:" + self.proto.guid, signature)
            f = self.db.FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("Failed to validate follower signature")
            return ["False"]
def rpc_get_followers(self, sender):
self.log.info("Fetching followers list from db")
self.router.addContact(sender)
ser = self.db.FollowData().get_followers()
if ser is None:
return ["None"]
else:
return [ser, self.signing_key.sign(ser)[:64]]
def rpc_get_following(self, sender):
self.log.info("Fetching following list from db")
self.router.addContact(sender)
ser = self.db.FollowData().get_following()
if ser is None:
return ["None"]
else:
return [ser, self.signing_key.sign(ser)[:64]]
    def rpc_notify(self, sender, message, signature):
        """
        Accept a short (<=140 char) signed notification, but only from nodes
        we are following; dispatch it to all registered NotificationListeners.
        """
        if len(message) <= 140 and self.db.FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                return ["False"]
            self.log.info("Received a notification from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                # Only dispatch to listeners implementing the right interface.
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]
    def rpc_message(self, sender, pubkey, encrypted):
        """
        Decrypt and validate a direct message, then hand it to all
        registered MessageListeners. The message is encrypted to our key
        with an ephemeral sender key, and its inner protobuf is signed.
        """
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = Plaintext_Message()
            p.ParseFromString(plaintext)
            # The signature covers the message with its signature field cleared.
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            # Guid must be derived from the pubkey hash, satisfy the proof-of-work
            # threshold, and match the node the message arrived from.
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("Received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.error("Received invalid message from %s" % sender)
            return ["False"]
    def rpc_order(self, sender, pubkey, encrypted):
        """
        Receive an encrypted purchase order. On successful verification we
        start watching the payment address for funding and return our
        signature over that address as acceptance.
        """
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            # OrderedDict preserves key order so the contract hash is stable.
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            if c.verify(sender.signed_pubkey[64:]):
                self.router.addContact(sender)
                self.log.info("Received an order from %s" % sender)
                payment_address = c.contract["buyer_order"]["order"]["payment"]["address"]
                signature = self.signing_key.sign(str(payment_address))[:64]
                c.await_funding(self.multiplexer.ws, self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.error("Received invalid order from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("Unable to decrypt order from %s" % sender)
            return ["False"]
    def rpc_order_confirmation(self, sender, pubkey, encrypted):
        """
        Receive an encrypted order confirmation from a vendor and record it
        against the matching in-progress order.
        """
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            # accept_order_confirmation returns the contract id on success,
            # or a falsy value when the confirmation does not validate.
            contract_id = c.accept_order_confirmation(self.multiplexer.ws)
            if contract_id:
                self.router.addContact(sender)
                self.log.info("Received confirmation for order %s" % contract_id)
                return ["True"]
            else:
                self.log.error("Received invalid order confirmation from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("Unable to decrypt order confirmation from %s" % sender)
            return ["False"]
    # --- outgoing RPC wrappers -------------------------------------------
    # Each call* helper resolves the target node's (ip, port) address,
    # fires the corresponding RPC, and routes the reply through
    # handleCallResponse so the routing table is updated according to
    # whether the node answered.
    def callGetContract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetImage(self, nodeToAsk, image_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_image(address, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetProfile(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_profile(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetUserMetadata(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_user_metadata(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetListings(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_listings(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract_metadata(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callFollow(self, nodeToAsk, proto, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.follow(address, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callUnfollow(self, nodeToAsk, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.unfollow(address, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetFollowers(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_followers(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callGetFollowing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_following(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callNotify(self, nodeToAsk, message, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.notify(address, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    # NOTE(review): "ehemeral_pubkey" is a typo for "ephemeral_pubkey"; kept
    # as-is because renaming the parameter could break keyword-argument callers.
    def callMessage(self, nodeToAsk, ehemeral_pubkey, ciphertext):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.message(address, ehemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.order(address, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
    def callOrderConfirmation(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.order_confirmation(address, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)
def handleCallResponse(self, result, node):
"""
If we get a response, add the node to the routing table. If
we get no response, make sure it's removed from the routing table.
"""
if result[0]:
self.log.info("got response from %s, adding to router" % node)
self.router.addContact(node)
else:
self.log.debug("no response from %s, removing from router" % node)
self.router.removeContact(node)
return result
    def __iter__(self):
        # Iterating the protocol yields the RPC command names it handles,
        # enabling "command in protocol" style checks.
        return iter(self.handled_commands)
| {
"repo_name": "JimmyMow/OpenBazaar-Server",
"path": "market/protocol.py",
"copies": "3",
"size": "14933",
"license": "mit",
"hash": 7929839632642005000,
"line_mean": 42.0345821326,
"line_max": 109,
"alpha_frac": 0.616219112,
"autogenerated": false,
"ratio": 4.043595992418088,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6159815104418088,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import os
import obelisk
import nacl.encoding
from binascii import unhexlify
from collections import OrderedDict
from functools import wraps
from txrestapi.resource import APIResource
from txrestapi.methods import GET, POST, DELETE
from twisted.web import server
from twisted.web.resource import NoResource
from twisted.web import http
from twisted.web.server import Site
from twisted.internet import defer, reactor, task
from twisted.protocols.basic import FileSender
from config import DATA_FOLDER, RESOLVER, delete_value, set_value, get_value, str_to_bool, TRANSACTION_FEE
from protos.countries import CountryCode
from protos import objects
from keys import blockchainid
from keys.keychain import KeyChain
from dht.utils import digest
from market.profile import Profile
from market.contracts import Contract, check_order_for_payment
from market.btcprice import BtcPrice
from net.upnp import PortMapper
from api.utils import sanitize_html
DEFAULT_RECORDS_COUNT = 20
DEFAULT_RECORDS_OFFSET = 0
class OpenBazaarAPI(APIResource):
"""
This RESTful API allows clients to pull relevant data from the
OpenBazaar daemon for use in a GUI or other application.
"""
# pylint: disable=E0213, E1102
    def authenticated(func):
        """
        Decorator for API endpoints requiring an authenticated session.

        Unauthenticated requests get a 401 page and the session is expired.
        For authenticated JSON requests, the body is parsed into
        `request.args` before the wrapped handler runs.
        """
        def _authenticate(self, request):
            session = request.getSession()
            if session not in self.authenticated_sessions:
                session.expire()
                request.setResponseCode(401)
                request.write('<html><body><div><span style="color:red">Authorization Error</span></div>'
                              '<h2>Permission Denied</h2></body></html>')
                request.finish()
                return server.NOT_DONE_YET
            else:
                # JSON bodies replace the form-encoded args dict wholesale.
                if request.getHeader("Content-Type") == "application/json":
                    request.args = json.loads(request.content.read())
                func(self, request)
                return server.NOT_DONE_YET
        return wraps(func)(_authenticate)
    def __init__(self, mserver, kserver, protocol, username, password, authenticated_sessions):
        """
        Args:
            mserver: market server used for peer-to-peer market calls.
            kserver: kademlia server used for DHT resolution/storage.
            protocol: the OpenBazaarProtocol connection multiplexer.
            username/password: REST API credentials.
            authenticated_sessions: shared list of logged-in sessions.
        """
        self.mserver = mserver
        self.kserver = kserver
        self.protocol = protocol
        self.db = mserver.db
        self.keychain = KeyChain(self.db)
        self.username = username
        self.password = password
        self.authenticated_sessions = authenticated_sessions
        # host -> number of recent failed login attempts (see _failed_login)
        self.failed_login_attempts = {}
        # Touch sessions just under the 15-minute expiry so they stay alive.
        task.LoopingCall(self._keep_sessions_alive).start(890, False)
        APIResource.__init__(self)
def _keep_sessions_alive(self):
for session in self.authenticated_sessions:
session.touch()
def _failed_login(self, host):
def remove_ban(host):
del self.failed_login_attempts[host]
if host in self.failed_login_attempts:
self.failed_login_attempts[host] += 1
reactor.callLater(3600, remove_ban, host)
else:
self.failed_login_attempts[host] = 1
@POST('^/api/v1/login')
    def login(self, request):
        """
        Authenticate the caller against the configured credentials and,
        on success, add the request's session to the authenticated set.
        A host with 7 or more recent failures is rejected outright.
        """
        request.setHeader('content-type', "application/json")
        # NOTE(review): getHost() is the server-side address; presumably this
        # was meant to throttle by client address — confirm before changing.
        if request.getHost().host in self.failed_login_attempts and \
                        self.failed_login_attempts[request.getHost().host] >= 7:
            return json.dumps({"success": False, "reason": "too many attempts"})
        try:
            if request.args["username"][0] == self.username and request.args["password"][0] == self.password:
                self.authenticated_sessions.append(request.getSession())
                # A successful login clears the failure counter for this host.
                if request.getHost().host in self.failed_login_attempts:
                    del self.failed_login_attempts[request.getHost().host]
                return json.dumps({"success": True})
            else:
                raise Exception("Invalid credentials")
        except Exception:
            self._failed_login(request.getHost().host)
            return json.dumps({"success": False, "reason": "invalid username or password"})
@GET('^/api/v1/get_image')
    @authenticated
    def get_image(self, request):
        """
        Stream an image by its 40-char hex hash. Looks in the local file
        map first, then the cache directory; if absent and a ``guid`` is
        supplied, requests the image from that peer (if connected) and
        serves it once cached.
        """
        @defer.inlineCallbacks
        def _showImage(resp=None):
            @defer.inlineCallbacks
            def _setContentDispositionAndSend(file_path, extension, content_type):
                request.setHeader('content-disposition', 'filename="%s.%s"' % (file_path, extension))
                request.setHeader('content-type', content_type)
                request.setHeader('cache-control', 'max-age=604800')
                f = open(file_path, "rb")
                yield FileSender().beginFileTransfer(f, request)
                f.close()
                defer.returnValue(0)
            # image_path is closed over from the enclosing handler below.
            if os.path.exists(image_path):
                yield _setContentDispositionAndSend(image_path, "jpg", "image/jpeg")
            else:
                request.setResponseCode(http.NOT_FOUND)
                request.write("No such image '%s'" % request.path)
            request.finish()
        if "hash" in request.args and len(request.args["hash"][0]) == 40:
            if self.db.filemap.get_file(request.args["hash"][0]) is not None:
                image_path = self.db.filemap.get_file(request.args["hash"][0])
            else:
                image_path = os.path.join(DATA_FOLDER, "cache", request.args["hash"][0])
            if not os.path.exists(image_path) and "guid" in request.args:
                # Try to fetch from the peer over an existing connection.
                node = None
                for connection in self.protocol.values():
                    if connection.handler.node is not None and \
                            connection.handler.node.id == unhexlify(request.args["guid"][0]):
                        node = connection.handler.node
                self.mserver.get_image(node, unhexlify(request.args["hash"][0])).addCallback(_showImage)
                if node is None:
                    _showImage()
            else:
                _showImage()
        else:
            request.write(NoResource().render(request))
            request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/profile')
@authenticated
def get_profile(self, request):
def parse_profile(profile, temp_handle=None):
if profile is not None:
profile_json = {
"profile": {
"name": profile.name,
"location": str(CountryCode.Name(profile.location)),
"public_key": profile.guid_key.public_key.encode("hex"),
"nsfw": profile.nsfw,
"vendor": profile.vendor,
"moderator": profile.moderator,
"moderation_fee": round(profile.moderation_fee, 2),
"handle": profile.handle,
"about": profile.about,
"short_description": profile.short_description,
"website": profile.website,
"email": profile.email,
"primary_color": profile.primary_color,
"secondary_color": profile.secondary_color,
"background_color": profile.background_color,
"text_color": profile.text_color,
"pgp_key": profile.pgp_key.public_key,
"avatar_hash": profile.avatar_hash.encode("hex"),
"header_hash": profile.header_hash.encode("hex"),
"social_accounts": {}
}
}
if temp_handle:
profile_json["profile"]["temp_handle"] = temp_handle
if "guid" in request.args:
profile_json["profile"]["guid"] = request.args["guid"][0]
else:
profile_json["profile"]["guid"] = self.keychain.guid.encode("hex")
for account in profile.social:
profile_json["profile"]["social_accounts"][str(
objects.Profile.SocialAccount.SocialType.Name(account.type)).lower()] = {
"username": account.username,
"proof_url": account.proof_url
}
if (profile.handle is not "" and "(unconfirmed)" not in profile.handle and
not blockchainid.validate(profile.handle, profile_json["profile"]["guid"])):
profile_json["profile"]["handle"] = ""
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(profile_json), indent=4))
request.finish()
else:
request.write(json.dumps({}))
request.finish()
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.get_profile(node).addCallback(parse_profile)
else:
request.write(json.dumps({}))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
else:
p = Profile(self.db).get()
if not p.HasField("guid_key"):
request.write(json.dumps({}))
request.finish()
else:
temp_handle = self.db.profile.get_temp_handle()
parse_profile(p, None if temp_handle == "" else temp_handle)
return server.NOT_DONE_YET
@GET('^/api/v1/get_listings')
    @authenticated
    def get_listings(self, request):
        """
        Return listings as JSON — our own when no ``guid`` argument is
        given, otherwise the listings of the node resolved via the DHT.
        """
        def parse_listings(listings):
            if listings is not None:
                response = {"listings": []}
                for l in listings.listing:
                    listing_json = {
                        "title": l.title,
                        "contract_hash": l.contract_hash.encode("hex"),
                        "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                        "category": l.category,
                        "price": l.price,
                        "currency_code": l.currency_code,
                        "nsfw": l.nsfw,
                        "origin": str(CountryCode.Name(l.origin)),
                        "ships_to": []
                    }
                    # 0 is the protobuf default, i.e. no contract type set.
                    if l.contract_type != 0:
                        listing_json["contract_type"] = str(objects.Listings.ContractType.Name(l.contract_type))
                    for country in l.ships_to:
                        listing_json["ships_to"].append(str(CountryCode.Name(country)))
                    response["listings"].append(listing_json)
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_listings(node).addCallback(parse_listings)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            ser = self.db.listings.get_proto()
            if ser is not None:
                l = objects.Listings()
                l.ParseFromString(ser)
                parse_listings(l)
            else:
                parse_listings(None)
        return server.NOT_DONE_YET
@GET('^/api/v1/get_followers')
    @authenticated
    def get_followers(self, request):
        """
        Return a page of followers (ours, or a remote node's via ``guid``).
        ``start`` selects the pagination offset. The parsed value is a
        (Followers proto or None, total count or None) pair.
        """
        def parse_followers(followers):
            if followers[0] is not None:
                response = {"followers": []}
                for f in followers[0].followers:
                    follower_json = {
                        "guid": f.guid.encode("hex"),
                        "handle": f.metadata.handle,
                        "name": f.metadata.name,
                        "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                        "short_description": f.metadata.short_description,
                        "nsfw": f.metadata.nsfw
                    }
                    response["followers"].append(follower_json)
                if followers[1] is not None:
                    response["count"] = followers[1]
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        start = 0
        if "start" in request.args:
            start = int(request.args["start"][0])
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_followers(node, start).addCallback(parse_followers)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            ser = self.db.follow.get_followers(start)
            if ser[0] is not None:
                f = objects.Followers()
                f.ParseFromString(ser[0])
                parse_followers((f, ser[1]))
            else:
                parse_followers((None, 0))
        return server.NOT_DONE_YET
@GET('^/api/v1/get_following')
    @authenticated
    def get_following(self, request):
        """
        Return the list of users being followed — ours, or a remote node's
        when a ``guid`` argument is supplied.
        """
        def parse_following(following):
            if following is not None:
                response = {"following": []}
                for f in following.users:
                    user_json = {
                        "guid": f.guid.encode("hex"),
                        "handle": f.metadata.handle,
                        "name": f.metadata.name,
                        "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                        "short_description": f.metadata.short_description,
                        "nsfw": f.metadata.nsfw
                    }
                    response["following"].append(user_json)
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_following(node).addCallback(parse_following)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            ser = self.db.follow.get_following()
            if ser is not None:
                f = objects.Following()
                f.ParseFromString(ser)
                parse_following(f)
            else:
                parse_following(None)
        return server.NOT_DONE_YET
@POST('^/api/v1/follow')
@authenticated
def follow(self, request):
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.follow(node)
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
return server.NOT_DONE_YET
@POST('^/api/v1/unfollow')
@authenticated
def unfollow(self, request):
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.unfollow(node)
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
return server.NOT_DONE_YET
# pylint: disable=R0201
@POST('^/api/v1/profile')
    @authenticated
    def update_profile(self, request):
        """
        Update our profile from the posted form fields.

        Scalar fields are collected into a fresh ``objects.Profile``
        message and merged via ``Profile.update``; boolean and color fields
        are written straight onto the stored profile. A profile must either
        already exist or the request must carry ``name`` and ``location``.
        """
        try:
            p = Profile(self.db)
            can_update_profile = (p.get().HasField("guid_key") or
                                  ("name" in request.args and
                                   "location" in request.args))
            if not can_update_profile:
                request_dict = {
                    "success": False,
                    "reason": "name or location not included"
                }
                request.write(json.dumps(request_dict, indent=4))
                request.finish()
                return False
            u = objects.Profile()
            if "name" in request.args:
                u.name = request.args["name"][0].decode("utf8")
            if "location" in request.args:
                # This needs to be formatted. Either here or from the UI.
                u.location = CountryCode.Value(request.args["location"][0].upper())
            if "handle" in request.args:
                # Only accept a handle once it resolves to our guid on the
                # blockchain id service; otherwise park it as a temp handle.
                if blockchainid.validate(request.args["handle"][0], self.keychain.guid.encode("hex")):
                    u.handle = request.args["handle"][0].decode("utf8")
                    self.db.profile.set_temp_handle("")
                else:
                    u.handle = ""
                    self.db.profile.set_temp_handle(request.args["handle"][0].decode("utf8"))
            if "about" in request.args:
                u.about = request.args["about"][0].decode("utf8")
            if "short_description" in request.args:
                u.short_description = request.args["short_description"][0].decode("utf8")
            if "nsfw" in request.args:
                p.profile.nsfw = str_to_bool(request.args["nsfw"][0])
            if "vendor" in request.args:
                p.profile.vendor = str_to_bool(request.args["vendor"][0])
            if "moderator" in request.args:
                p.profile.moderator = str_to_bool(request.args["moderator"][0])
            if "moderation_fee" in request.args:
                p.profile.moderation_fee = round(float(request.args["moderation_fee"][0]), 2)
            if "website" in request.args:
                u.website = request.args["website"][0].decode("utf8")
            if "email" in request.args:
                u.email = request.args["email"][0].decode("utf8")
            if "primary_color" in request.args:
                p.profile.primary_color = int(request.args["primary_color"][0])
            if "secondary_color" in request.args:
                p.profile.secondary_color = int(request.args["secondary_color"][0])
            if "background_color" in request.args:
                p.profile.background_color = int(request.args["background_color"][0])
            if "text_color" in request.args:
                p.profile.text_color = int(request.args["text_color"][0])
            if "avatar" in request.args:
                u.avatar_hash = unhexlify(request.args["avatar"][0])
            if "header" in request.args:
                u.header_hash = unhexlify(request.args["header"][0])
            if "pgp_key" in request.args and "signature" in request.args:
                p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                              self.keychain.guid.encode("hex"))
            if not p.get().HasField("guid_key"):
                # First-time profile: bind our guid key, signed by our signing key.
                key = u.PublicKey()
                key.public_key = self.keychain.verify_key.encode()
                key.signature = self.keychain.signing_key.sign(key.public_key)[:64]
                u.guid_key.MergeFrom(key)
            p.update(u)
            request.write(json.dumps({"success": True}))
            request.finish()
            self.kserver.node.vendor = p.get().vendor
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@POST('^/api/v1/social_accounts')
@authenticated
def add_social_account(self, request):
try:
p = Profile(self.db)
if "account_type" in request.args and "username" in request.args:
p.add_social_account(request.args["account_type"][0].decode("utf8"),
request.args["username"][0].decode("utf8"),
request.args["proof"][0].decode("utf8") if
"proof" in request.args else None)
else:
raise Exception("Missing required fields")
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/social_accounts')
    @authenticated
    def delete_social_account(self, request):
        """
        Remove a social-media account from our profile. A missing
        ``account_type`` argument is a silent no-op that still succeeds.
        """
        try:
            p = Profile(self.db)
            if "account_type" in request.args:
                p.remove_social_account(request.args["account_type"][0])
            request.write(json.dumps({"success": True}))
            request.finish()
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@GET('^/api/v1/contracts')
    @authenticated
    def get_contract(self, request):
        """
        Return a contract by its 40-char hex ``id`` — from a remote node
        when a ``guid`` argument is supplied, otherwise from the local
        file map.
        """
        def parse_contract(contract):
            if contract is not None:
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(contract), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "id" in request.args and len(request.args["id"][0]) == 40:
            if "guid" in request.args and len(request.args["guid"][0]) == 40:
                def get_node(node):
                    if node is not None:
                        self.mserver.get_contract(node, unhexlify(request.args["id"][0]))\
                            .addCallback(parse_contract)
                    else:
                        request.write(json.dumps({}))
                        request.finish()
                self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
            else:
                try:
                    # OrderedDict keeps key order so the contract hash stays stable.
                    with open(self.db.filemap.get_file(request.args["id"][0]), "r") as filename:
                        contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
                except Exception:
                    parse_contract(None)
        else:
            request.write(json.dumps({}))
            request.finish()
        return server.NOT_DONE_YET
@POST('^/api/v1/contracts')
@authenticated
def set_contract(self, request):
try:
if "options" in request.args:
options = {}
for option in request.args["options"]:
options[option.decode("utf8")] = request.args[option.decode("utf8")]
keywords = None
if "keywords" in request.args:
keywords = []
for keyword in request.args["keywords"]:
keywords.append(keyword.decode("utf8"))
if len(keywords) > 10:
raise Exception("Too many keywords")
if "contract_id" in request.args:
c = Contract(self.db, hash_value=unhexlify(request.args["contract_id"][0]),
testnet=self.protocol.testnet)
else:
c = Contract(self.db, testnet=self.protocol.testnet)
c.create(
str(request.args["expiration_date"][0]),
request.args["metadata_category"][0],
request.args["title"][0].decode("utf8"),
request.args["description"][0].decode("utf8"),
request.args["currency_code"][0],
request.args["price"][0],
request.args["process_time"][0].decode("utf8"),
str_to_bool(request.args["nsfw"][0]),
shipping_origin=request.args["shipping_origin"][0] if "shipping_origin" in request.args else None,
shipping_regions=request.args["ships_to"] if "ships_to" in request.args else None,
est_delivery_domestic=request.args["est_delivery_domestic"][0].decode("utf8")
if "est_delivery_domestic" in request.args else None,
est_delivery_international=request.args["est_delivery_international"][0].decode("utf8")
if "est_delivery_international" in request.args else None,
terms_conditions=request.args["terms_conditions"][0].decode("utf8")
if request.args["terms_conditions"][0] is not "" else None,
returns=request.args["returns"][0].decode("utf8")
if request.args["returns"][0] is not "" else None,
shipping_currency_code=request.args["shipping_currency_code"][0],
shipping_domestic=request.args["shipping_domestic"][0],
shipping_international=request.args["shipping_international"][0],
keywords=keywords,
category=request.args["category"][0].decode("utf8")
if request.args["category"][0] is not "" else None,
condition=request.args["condition"][0].decode("utf8")
if request.args["condition"][0] is not "" else None,
sku=request.args["sku"][0].decode("utf8") if request.args["sku"][0] is not "" else None,
images=request.args["images"],
free_shipping=str_to_bool(request.args["free_shipping"][0]),
options=options if "options" in request.args else None,
moderators=request.args["moderators"] if "moderators" in request.args else None,
contract_id=request.args["contract_id"][0] if "contract_id" in request.args else None)
for keyword in request.args["keywords"]:
if keyword != "":
self.kserver.set(digest(keyword.lower()), unhexlify(c.get_contract_id()),
self.kserver.node.getProto().SerializeToString())
request.write(json.dumps({"success": True, "id": c.get_contract_id()}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/contracts')
    @authenticated
    def delete_contract(self, request):
        """
        Delete a contract by ``id``: unpublish each of its keywords from
        the DHT (with a signed delete) and remove the local files,
        optionally including images when ``delete_images`` is set.
        """
        try:
            if "id" in request.args:
                file_path = self.db.filemap.get_file(request.args["id"][0])
                with open(file_path, 'r') as filename:
                    contract = json.load(filename, object_pairs_hook=OrderedDict)
                c = Contract(self.db, contract=contract)
                if "keywords" in c.contract["vendor_offer"]["listing"]["item"]:
                    for keyword in c.contract["vendor_offer"]["listing"]["item"]["keywords"]:
                        if keyword != "":
                            # DHT deletes must be signed over the contract id.
                            if isinstance(keyword, unicode):
                                keyword = keyword.encode('utf8')
                            self.kserver.delete(keyword.lower(), unhexlify(c.get_contract_id()),
                                                self.keychain.signing_key.sign(
                                                    unhexlify(c.get_contract_id()))[:64])
                if "delete_images" in request.args:
                    c.delete(delete_images=True)
                else:
                    c.delete()
            request.write(json.dumps({"success": True}))
            request.finish()
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@GET('^/api/v1/shutdown')
    def shutdown(self, request):
        """
        Cleanly stop the daemon: persist known vendors, remove our UPnP
        port mappings and stop the reactor. Allowed for authenticated
        sessions or loopback callers.
        """
        session = request.getSession()
        if session not in self.authenticated_sessions and request.getHost().host != "127.0.0.1":
            session.expire()
            request.setResponseCode(401)
            request.write('<html><body><div><span style="color:red">Authorization Error</span></div>'
                          '<h2>Permission Denied</h2></body></html>')
            request.finish()
            return server.NOT_DONE_YET
        else:
            # Save the in-memory vendor list so it survives the restart.
            for vendor in self.protocol.vendors.values():
                self.db.vendors.save_vendor(vendor.id.encode("hex"), vendor.getProto().SerializeToString())
            PortMapper().clean_my_mappings(self.kserver.node.port)
            self.protocol.shutdown()
            reactor.stop()
            return
@POST('^/api/v1/make_moderator')
@authenticated
def make_moderator(self, request):
try:
self.mserver.make_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/unmake_moderator')
@authenticated
def unmake_moderator(self, request):
try:
self.mserver.unmake_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/purchase_contract')
    @authenticated
    def purchase_contract(self, request):
        """
        Build a purchase from a stored contract, send it to the vendor and,
        if accepted, start watching the payment address for funding.
        Responds with the payment address, amount and order id.
        """
        try:
            def handle_response(resp, contract):
                # resp is the vendor's signature on acceptance, falsy on rejection.
                if resp:
                    contract.await_funding(self.mserver.protocol.get_notification_listener(),
                                           self.protocol.blockchain, resp)
                    request.write(json.dumps({"success": True, "payment_address": payment[0],
                                              "amount": payment[1],
                                              "order_id": c.get_order_id()},
                                             indent=4))
                    request.finish()
                else:
                    request.write(json.dumps({"success": False, "reason": "vendor rejected contract"}, indent=4))
                    request.finish()
            options = None
            if "options" in request.args:
                options = {}
                for option in request.args["options"]:
                    options[option] = request.args[option][0]
            c = Contract(self.db, hash_value=unhexlify(request.args["id"][0]), testnet=self.protocol.testnet)
            # payment is a (payment_address, amount) pair.
            payment = c.\
                add_purchase_info(int(request.args["quantity"][0]),
                                  request.args["refund_address"][0],
                                  request.args["ship_to"][0].decode("utf8")
                                  if "ship_to" in request.args else None,
                                  request.args["address"][0].decode("utf8")
                                  if "address" in request.args else None,
                                  request.args["city"][0].decode("utf8")
                                  if "city" in request.args else None,
                                  request.args["state"][0].decode("utf8")
                                  if "state" in request.args else None,
                                  request.args["postal_code"][0].decode("utf8")
                                  if "postal_code" in request.args else None,
                                  request.args["country"][0].decode("utf8")
                                  if "country" in request.args else None,
                                  request.args["moderator"][0] if "moderator" in request.args else None,
                                  options)
            def get_node(node):
                if node is not None:
                    self.mserver.purchase(node, c).addCallback(handle_response, c)
                else:
                    request.write(json.dumps({"success": False, "reason": "unable to reach vendor"}, indent=4))
                    request.finish()
            vendor_guid = unhexlify(c.contract["vendor_offer"]["listing"]["id"]["guid"])
            self.kserver.resolve(vendor_guid).addCallback(get_node)
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@POST('^/api/v1/confirm_order')
    @authenticated
    def confirm_order(self, request):
        """
        Vendor-side order confirmation: load the in-progress contract by
        ``id``, append the confirmation (payout address, shipping details)
        if not already present, and send it to the buyer.
        """
        try:
            def respond(success):
                # success is True on delivery, otherwise a failure reason.
                if success is True:
                    request.write(json.dumps({"success": True}))
                    request.finish()
                else:
                    request.write(json.dumps({"success": False, "reason": success}))
                    request.finish()
            file_name = request.args["id"][0] + ".json"
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", file_name)
            with open(file_path, 'r') as filename:
                order = json.load(filename, object_pairs_hook=OrderedDict)
            c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
            # Only append the confirmation once; re-confirming just resends.
            if "vendor_order_confirmation" not in c.contract:
                c.add_order_confirmation(self.protocol.blockchain,
                                         request.args["payout_address"][0],
                                         comments=request.args["comments"][0].decode("utf8")
                                         if "comments" in request.args else None,
                                         shipper=request.args["shipper"][0].decode("utf8")
                                         if "shipper" in request.args else None,
                                         tracking_number=request.args["tracking_number"][0].decode("utf8")
                                         if "tracking_number" in request.args else None,
                                         est_delivery=request.args["est_delivery"][0].decode("utf8")
                                         if "est_delivery" in request.args else None,
                                         url=request.args["url"][0].decode("utf8")
                                         if "url" in request.args else None,
                                         password=request.args["password"][0].decode("utf8")
                                         if "password" in request.args else None)
            guid = c.contract["buyer_order"]["order"]["id"]["guid"]
            self.mserver.confirm_order(guid, c).addCallback(respond)
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@POST('^/api/v1/upload_image')
@authenticated
def upload_image(self, request):
try:
ret = []
if "image" in request.args:
for image in request.args["image"]:
img = image.decode('base64')
hash_value = digest(img).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "media", hash_value), 'wb') as outfile:
outfile.write(img)
self.db.filemap.insert(hash_value, os.path.join("store", "media", hash_value))
ret.append(hash_value)
elif "avatar" in request.args:
avi = request.args["avatar"][0].decode("base64")
hash_value = digest(avi).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "avatar"), 'wb') as outfile:
outfile.write(avi)
self.db.filemap.insert(hash_value, os.path.join("store", "avatar"))
ret.append(hash_value)
elif "header" in request.args:
hdr = request.args["header"][0].decode("base64")
hash_value = digest(hdr).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "header"), 'wb') as outfile:
outfile.write(hdr)
self.db.filemap.insert(hash_value, os.path.join("store", "header"))
ret.append(hash_value)
request.write(json.dumps({"success": True, "image_hashes": ret}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/complete_order')
@authenticated
def complete_order(self, request):
def respond(success):
if success is True:
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": success}))
request.finish()
file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", request.args["id"][0] + ".json")
if not os.path.exists(file_path):
file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", request.args["id"][0] + ".json")
with open(file_path, 'r') as filename:
order = json.load(filename, object_pairs_hook=OrderedDict)
c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
if "buyer_receipt" not in c.contract:
c.add_receipt(True,
self.protocol.blockchain,
feedback=request.args["feedback"][0] if "feedback" in request.args else None,
quality=request.args["quality"][0] if "quality" in request.args else None,
description=request.args["description"][0] if "description" in request.args else None,
delivery_time=request.args["delivery_time"][0]
if "delivery_time" in request.args else None,
customer_service=request.args["customer_service"][0]
if "customer_service" in request.args else None,
review=request.args["review"][0].decode("utf8") if "review" in request.args else "",
anonymous=str_to_bool(request.args["anonymous"]) if "anonymous" in request.args else True)
guid = c.contract["vendor_offer"]["listing"]["id"]["guid"]
self.mserver.complete_order(guid, c).addCallback(respond)
return server.NOT_DONE_YET
@POST('^/api/v1/settings')
@authenticated
def set_settings(self, request):
try:
settings = self.db.settings
resolver = RESOLVER if "resolver" not in request.args or request.args["resolver"][0] == "" \
else request.args["resolver"][0]
if "libbitcoin_server" in request.args and \
request.args["libbitcoin_server"][0] != "" and \
request.args["libbitcoin_server"][0] != "null":
if self.protocol.testnet:
set_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom",
request.args["libbitcoin_server"][0])
else:
set_value("LIBBITCOIN_SERVERS", "mainnet_server_custom",
request.args["libbitcoin_server"][0])
else:
if self.protocol.testnet:
if get_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom"):
delete_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom")
else:
if get_value("LIBBITCOIN_SERVERS", "mainnet_server_custom"):
delete_value("LIBBITCOIN_SERVERS", "mainnet_server_custom")
if resolver != get_value("CONSTANTS", "RESOLVER"):
set_value("CONSTANTS", "RESOLVER", resolver)
if "smtp_notifications" not in request.args:
request.args["smtp_notifications"] = ['false']
smtp_attrs = ["smtp_server", "smtp_sender", "smtp_recipient", "smtp_username", "smtp_password"]
for smtp_attr in smtp_attrs:
if smtp_attr not in request.args:
request.args[smtp_attr] = ['']
settings_list = settings.get()
if "moderators" in request.args and settings_list is not None:
mod_json = settings_list[11]
if mod_json != "":
prev_mods = json.loads(mod_json)
current_mods = request.args["moderators"]
to_add = list(set(current_mods) - set(prev_mods))
to_remove = list(set(prev_mods) - set(current_mods))
if len(to_remove) > 0 or len(to_add) > 0:
self.mserver.update_moderators_on_listings(request.args["moderators"])
settings.update(
request.args["refund_address"][0],
request.args["currency_code"][0],
request.args["country"][0],
request.args["language"][0],
request.args["time_zone"][0],
1 if str_to_bool(request.args["notifications"][0]) else 0,
json.dumps(request.args["shipping_addresses"] if request.args["shipping_addresses"] != "" else []),
json.dumps(request.args["blocked"] if request.args["blocked"] != "" else []),
request.args["terms_conditions"][0],
request.args["refund_policy"][0],
json.dumps(request.args["moderators"] if request.args["moderators"] != "" else []),
1 if str_to_bool(request.args["smtp_notifications"][0]) else 0,
request.args["smtp_server"][0],
request.args["smtp_sender"][0],
request.args["smtp_recipient"][0],
request.args["smtp_username"][0],
request.args["smtp_password"][0]
)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
    @GET('^/api/v1/settings')
    @authenticated
    def get_settings(self, request):
        """
        GET /api/v1/settings

        Return the stored user settings as JSON, augmented with the NAT
        classification, custom libbitcoin server, signing-key seed and the
        resolved moderator profiles. Writes an empty JSON object when no
        settings row exists yet.
        """
        settings = self.db.settings.get()
        if settings is None:
            request.write(json.dumps({}, indent=4))
            request.finish()
        else:
            # Map the protobuf NAT type onto the labels the UI displays.
            if self.protocol.nat_type == objects.FULL_CONE:
                nat_type = "Open"
            elif self.protocol.nat_type == objects.RESTRICTED:
                nat_type = "Restricted"
            else:
                nat_type = "Severely Restricted"
            # NOTE(review): the numeric indices below mirror the column
            # order of the settings table row — keep in sync with
            # db.settings / set_settings().
            settings_json = {
                "refund_address": settings[1],
                "currency_code": settings[2],
                "country": settings[3],
                "language": settings[4],
                "time_zone": settings[5],
                "notifications": True if settings[6] == 1 else False,
                "shipping_addresses": json.loads(settings[7]),
                "blocked_guids": json.loads(settings[8]),
                "libbitcoin_server": get_value(
                    "LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom")if self.protocol.testnet else get_value(
                    "LIBBITCOIN_SERVERS", "mainnet_server_custom"),
                "seed": KeyChain(self.db).signing_key.encode(encoder=nacl.encoding.HexEncoder),
                "terms_conditions": "" if settings[9] is None else settings[9],
                "refund_policy": "" if settings[10] is None else settings[10],
                "resolver": get_value("CONSTANTS", "RESOLVER"),
                "network_connection": nat_type,
                "transaction_fee": TRANSACTION_FEE,
                "smtp_notifications": True if settings[14] == 1 else False,
                "smtp_server": settings[15],
                "smtp_sender": settings[16],
                "smtp_recipient": settings[17],
                "smtp_username": settings[18],
                "smtp_password": settings[19],
            }
            mods = []
            # Resolve each stored moderator guid to its cached profile;
            # any parse/lookup failure leaves the list as collected so far.
            try:
                for guid in json.loads(settings[11]):
                    info = self.db.moderators.get_moderator(guid)
                    if info is not None:
                        m = {
                            "guid": guid,
                            "handle": info[4],
                            "name": info[5],
                            "avatar_hash": info[7].encode("hex"),
                            "short_description": info[6],
                            "fee": info[8]
                        }
                        mods.append(m)
            except Exception:
                pass
            settings_json["moderators"] = mods
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(sanitize_html(settings_json), indent=4))
            request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/connected_peers')
@authenticated
def get_connected_peers(self, request):
request.setHeader('content-type', "application/json")
peers = self.protocol.keys()
resp = {
"num_peers": len(peers),
"peers": peers
}
request.write(json.dumps(sanitize_html(resp), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/routing_table')
@authenticated
def get_routing_table(self, request):
nodes = []
for bucket in self.kserver.protocol.router.buckets:
for node in bucket.nodes.values():
n = {
"guid": node.id.encode("hex"),
"ip": node.ip,
"port": node.port,
"vendor": node.vendor,
"nat_type": objects.NATType.Name(node.nat_type)
}
nodes.append(n)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(nodes), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_notifications')
@authenticated
def get_notifications(self, request):
limit = int(request.args["limit"][0]) if "limit" in request.args else 20
start = request.args["start"][0] if "start" in request.args else ""
notifications = self.db.notifications.get_notifications(start, limit)
notification_dict = {
"unread": self.db.notifications.get_unread_count(),
"notifications": []
}
for n in notifications[::-1]:
notification_json = {
"id": n[0],
"guid": n[1],
"handle": n[2],
"type": n[3],
"order_id": n[4],
"title": n[5],
"timestamp": n[6],
"image_hash": n[7].encode("hex"),
"read": False if n[8] == 0 else True
}
notification_dict["notifications"].append(notification_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(notification_dict), indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_notification_as_read')
@authenticated
def mark_notification_as_read(self, request):
try:
for notif_id in request.args["id"]:
self.db.notifications.mark_as_read(notif_id)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/broadcast')
@authenticated
def broadcast(self, request):
try:
def get_response(num):
request.write(json.dumps({"success": True, "peers reached": num}, indent=4))
request.finish()
self.mserver.broadcast(request.args["message"][0]).addCallback(get_response)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_messages')
@authenticated
def get_chat_messages(self, request):
start = request.args["start"][0] if "start" in request.args else None
messages = self.db.messages.get_messages(request.args["guid"][0], "CHAT", start)
message_list = []
for m in messages[::-1]:
message_json = {
"id": m[11],
"guid": m[0],
"handle": m[1],
"message": m[5],
"timestamp": m[6],
"avatar_hash": m[7].encode("hex"),
"outgoing": False if m[9] == 0 else True,
"read": False if m[10] == 0 else True
}
message_list.append(message_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(message_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_conversations')
@authenticated
def get_chat_conversations(self, request):
messages = self.db.messages.get_conversations()
request.setHeader('content-type', "application/json")
request.write(json.dumps(messages, indent=4).encode("utf-8"))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/chat_conversation')
@authenticated
def delete_conversations(self, request):
try:
self.db.messages.delete_messages(request.args["guid"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_chat_message_as_read')
@authenticated
def mark_chat_message_as_read(self, request):
try:
self.db.messages.mark_as_read(request.args["guid"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_sales')
@authenticated
def get_sales(self, request):
sales = self.db.sales.get_all()
sales_list = []
for sale in sales:
sale_json = {
"order_id": sale[0],
"title": sale[1],
"description": sale[2],
"timestamp": sale[3],
"btc_total": sale[4],
"status": sale[5],
"thumbnail_hash": sale[6],
"buyer": sale[7],
"contract_type": sale[8],
"unread": sale[9]
}
sales_list.append(sale_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(sales_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_purchases')
@authenticated
def get_purchases(self, request):
purchases = self.db.purchases.get_all()
purchases_list = []
for purchase in purchases:
purchase_json = {
"order_id": purchase[0],
"title": purchase[1],
"description": purchase[2],
"timestamp": purchase[3],
"btc_total": purchase[4],
"status": purchase[5],
"thumbnail_hash": purchase[6],
"vendor": purchase[7],
"contract_type": purchase[8],
"unread": purchase[9]
}
purchases_list.append(purchase_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(purchases_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/check_for_payment')
@authenticated
def check_for_payment(self, request):
if not self.protocol.blockchain.connected:
request.write(json.dumps({"success": False, "reason": "libbitcoin server offline"}, indent=4))
request.finish()
return server.NOT_DONE_YET
try:
check_order_for_payment(request.args["order_id"][0], self.db,
self.protocol.blockchain,
self.mserver.protocol.get_notification_listener(),
self.protocol.testnet)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
    @GET('^/api/v1/get_order')
    @authenticated
    def get_order(self, request):
        """
        GET /api/v1/get_order

        Return the contract for `order_id`, searching the purchase, sale
        and case folders in turn. When the blockchain is connected the
        response is augmented with the transactions seen on the contract's
        payment address; otherwise (or on lookup failure/timeout) the bare
        contract is returned.
        """
        #TODO: if this is either a funded direct payment sale or complete moderated sale but
        #TODO: the payout tx has not hit the blockchain, rebroadcast.
        filename = request.args["order_id"][0] + ".json"
        # Locate the contract file and its status; the status source
        # (purchases vs sales vs cases) depends on which folder hits.
        if os.path.exists(os.path.join(DATA_FOLDER, "purchases", "unfunded", filename)):
            file_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", filename)
            status = self.db.purchases.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "in progress", filename)):
            file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", filename)
            status = self.db.purchases.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "trade receipts", filename)):
            file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", filename)
            status = self.db.purchases.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", filename)):
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", filename)
            status = self.db.sales.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "in progress", filename)):
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", filename)
            status = self.db.sales.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", filename)):
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", filename)
            status = self.db.sales.get_status(request.args["order_id"][0])
        elif os.path.exists(os.path.join(DATA_FOLDER, "cases", filename)):
            file_path = os.path.join(DATA_FOLDER, "cases", filename)
            status = 4
        else:
            # Unknown order id: empty JSON object.
            request.write(json.dumps({}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
        # NOTE: rebinds `filename` to the file object; harmless since the
        # string is no longer needed past this point.
        with open(file_path, 'r') as filename:
            order = json.load(filename, object_pairs_hook=OrderedDict)
        if status == 0 or status == 2:
            check_order_for_payment(request.args["order_id"][0], self.db, self.protocol.blockchain,
                                    self.mserver.protocol.get_notification_listener(),
                                    self.protocol.testnet)
        def return_order():
            # Fallback response without blockchain transaction data.
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(sanitize_html(order), indent=4))
            request.finish()
        def height_fetched(ec, chain_height):
            payment_address = order["buyer_order"]["order"]["payment"]["address"]
            txs = []
            def history_fetched(ec, history):
                # NOTE(review): on error, return_order() finishes the
                # request but execution still falls through into the loop
                # below and writes a second response — confirm intended.
                # Also, `timeout` is assigned only after
                # fetch_last_height() is called; a synchronous callback
                # would hit a NameError here.
                if ec:
                    return_order()
                elif timeout.active():
                    timeout.cancel()
                for tx_type, txid, i, height, value in history:  # pylint: disable=W0612
                    tx = {
                        "txid": txid.encode("hex"),
                        "value": round(float(value) / 100000000, 8),
                        "confirmations": chain_height - height + 1 if height != 0 else 0
                    }
                    if tx_type == obelisk.PointIdent.Output:
                        tx["type"] = "incoming"
                    else:
                        tx["type"] = "outgoing"
                    txs.append(tx)
                order["bitcoin_txs"] = txs
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(order, indent=4))
                request.finish()
            self.protocol.blockchain.fetch_history2(payment_address, history_fetched)
        if self.protocol.blockchain.connected:
            self.protocol.blockchain.fetch_last_height(height_fetched)
            # Answer with the bare contract if the lookup takes > 4s.
            timeout = reactor.callLater(4, return_order)
        else:
            return_order()
        return server.NOT_DONE_YET
@POST('^/api/v1/dispute_contract')
@authenticated
def dispute_contract(self, request):
try:
self.mserver.open_dispute(request.args["order_id"][0],
request.args["claim"][0].decode("utf8") if "claim" in request.args else None)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/close_dispute')
@authenticated
def close_dispute(self, request):
try:
def cb(resp):
if resp:
request.write(json.dumps({"success": True}, indent=4))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": resp}, indent=4))
request.finish()
d = self.mserver.close_dispute(request.args["order_id"][0],
request.args["resolution"][0].decode("utf8")
if "resolution" in request.args else None,
request.args["buyer_percentage"][0]
if "buyer_percentage" in request.args else None,
request.args["vendor_percentage"][0]
if "vendor_percentage" in request.args else None,
request.args["moderator_percentage"][0]
if "moderator_percentage" in request.args else None,
request.args["moderator_address"][0]
if "moderator_address" in request.args else None)
d.addCallback(cb)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/release_funds')
@authenticated
def release_funds(self, request):
try:
self.mserver.release_funds(request.args["order_id"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_cases')
@authenticated
def get_cases(self, request):
cases = self.db.cases.get_all()
cases_list = []
for case in cases:
purchase_json = {
"order_id": case[0],
"title": case[1],
"timestamp": case[2],
"order_date": case[3],
"btc_total": case[4],
"thumbnail_hash": case[5],
"buyer": case[6],
"vendor": case[7],
"validation": json.loads(case[8]),
"status": "closed" if case[10] == 1 else "open",
"unread": case[11]
}
cases_list.append(purchase_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(cases_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/order_messages')
@authenticated
def order_messages(self, request):
message_list = []
messages = self.db.messages.get_order_messages(request.args["order_id"][0])
for m in messages:
if m[0] is not None:
message_json = {
"guid": m[0],
"handle": m[1],
"message": m[5],
"timestamp": m[6],
"avatar_hash": m[7].encode("hex"),
"message_type": m[4],
"outgoing": False if m[9] == 0 else True
}
message_list.append(message_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(message_list), indent=4))
request.finish()
return server.NOT_DONE_YET
    @GET('^/api/v1/get_ratings')
    @authenticated
    def get_ratings(self, request):
        """
        GET /api/v1/get_ratings

        Return ratings as JSON. With a "guid" parameter the ratings are
        fetched from that remote node (optionally filtered by
        "contract_id"); otherwise they are read from the local database.
        """
        def parse_response(ratings):
            # Remote fetch result; None means the peer returned nothing.
            if ratings is not None:
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(ratings), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    if "contract_id" in request.args and request.args["contract_id"][0] != "":
                        self.mserver.get_ratings(node, unhexlify(request.args["contract_id"][0]))\
                            .addCallback(parse_response)
                    else:
                        self.mserver.get_ratings(node).addCallback(parse_response)
                else:
                    # Could not resolve the guid on the DHT.
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            # Local path: rows hold the serialized rating in column 0.
            ratings = []
            if "contract_id" in request.args and request.args["contract_id"][0] != "":
                for rating in self.db.ratings.get_listing_ratings(request.args["contract_id"][0]):
                    ratings.append(json.loads(rating[0]))
            else:
                for rating in self.db.ratings.get_all_ratings():
                    ratings.append(json.loads(rating[0]))
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(sanitize_html(ratings), indent=4))
            request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/btc_price')
@authenticated
def btc_price(self, request):
request.setHeader('content-type', "application/json")
if "currency" in request.args:
try:
result = BtcPrice.instance().get(request.args["currency"][0].upper(), False)
request.write(json.dumps({"btcExchange":result, "currencyCodes":BtcPrice.instance().prices}))
request.finish()
return server.NOT_DONE_YET
except KeyError:
pass
request.write(json.dumps({"currencyCodes": BtcPrice.instance().prices}))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/refund')
@authenticated
def refund(self, request):
try:
def respond(success):
if success is True:
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": success}))
request.finish()
self.mserver.refund(request.args["order_id"][0]).addCallback(respond)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_discussion_as_read')
@authenticated
def mark_discussion_as_read(self, request):
try:
self.db.purchases.update_unread(request.args["id"][0], reset=True)
self.db.sales.update_unread(request.args["id"][0], reset=True)
self.db.cases.update_unread(request.args["id"][0], reset=True)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
class RestAPI(Site):
    """
    Twisted web Site serving the OpenBazaarAPI resource, restricted to a
    whitelist of client IP addresses (defaults to loopback only).
    """

    def __init__(self, mserver, kserver, openbazaar_protocol, username, password,
                 authenticated_sessions, only_ip=None, timeout=60 * 60 * 1):
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if only_ip is None:
            only_ip = ["127.0.0.1"]
        self.only_ip = only_ip
        api_resource = OpenBazaarAPI(mserver, kserver, openbazaar_protocol,
                                     username, password, authenticated_sessions)
        Site.__init__(self, api_resource, timeout=timeout)

    def buildProtocol(self, addr):
        # Refuse connections from hosts outside the whitelist unless the
        # whitelist explicitly allows all interfaces ("0.0.0.0").
        if addr.host not in self.only_ip and "0.0.0.0" not in self.only_ip:
            return
        return Site.buildProtocol(self, addr)
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "api/restapi.py",
"copies": "1",
"size": "68298",
"license": "mit",
"hash": -6782258488951879000,
"line_mean": 45.907967033,
"line_max": 116,
"alpha_frac": 0.525520513,
"autogenerated": false,
"ratio": 4.257980049875312,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5283500562875312,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import os
import obelisk
import nacl.encoding
import time
from binascii import unhexlify
from collections import OrderedDict
from functools import wraps
from txrestapi.resource import APIResource
from txrestapi.methods import GET, POST, DELETE
from twisted.web import server
from twisted.web.resource import NoResource
from twisted.web import http
from twisted.web.server import Site
from twisted.internet import defer, reactor, task
from twisted.protocols.basic import FileSender
from config import DATA_FOLDER, RESOLVER, delete_value, set_value, get_value, str_to_bool, TRANSACTION_FEE, \
SERVER_VERSION
from protos.countries import CountryCode
from protos import objects
from keys import blockchainid
from keys.keychain import KeyChain
from dht.utils import digest
from market.profile import Profile
from market.contracts import Contract, check_order_for_payment
from market.btcprice import BtcPrice
from net.upnp import PortMapper
from api.utils import sanitize_html
from market.migration import migratev2
from twisted.web import static
DEFAULT_RECORDS_COUNT = 20
DEFAULT_RECORDS_OFFSET = 0
class OpenBazaarAPI(APIResource):
"""
This RESTful API allows clients to pull relevant data from the
OpenBazaar daemon for use in a GUI or other application.
"""
# pylint: disable=E0213, E1102
    def authenticated(func):
        """
        Decorator for API endpoints: rejects requests whose session has
        not been authenticated via /api/v1/login (unless "localhost" is
        whitelisted in authenticated_sessions) and, for JSON bodies,
        parses request.content into request.args before dispatching.
        """
        def _authenticate(self, request):
            session = request.getSession()
            if session not in self.authenticated_sessions and "localhost" not in self.authenticated_sessions:
                # Unauthenticated: expire the session and answer 401.
                session.expire()
                request.setResponseCode(401)
                request.write('<html><body><div><span style="color:red">Authorization Error</span></div>'
                              '<h2>Permission Denied</h2></body></html>')
                request.finish()
                return server.NOT_DONE_YET
            else:
                if request.getHeader("Content-Type") == "application/json":
                    request.args = json.loads(request.content.read())
                func(self, request)
                return server.NOT_DONE_YET
        return wraps(func)(_authenticate)
    def __init__(self, mserver, kserver, protocol, username, password, authenticated_sessions):
        """
        Args:
            mserver: market server used to reach other nodes.
            kserver: kademlia server used to resolve guids to nodes.
            protocol: the OpenBazaarProtocol connection multiplexer.
            username: login name checked by /api/v1/login.
            password: password checked by /api/v1/login.
            authenticated_sessions: shared list of logged-in sessions.
        """
        self.mserver = mserver
        self.kserver = kserver
        self.protocol = protocol
        self.db = mserver.db
        self.keychain = KeyChain(self.db)
        self.username = username
        self.password = password
        self.authenticated_sessions = authenticated_sessions
        # host -> count of failed logins; see _failed_login()/login().
        self.failed_login_attempts = {}
        # Touch sessions every 890s so they are not expired while a
        # client stays logged in (presumably just under twisted's default
        # session timeout — confirm).
        task.LoopingCall(self._keep_sessions_alive).start(890, False)
        APIResource.__init__(self)
def _keep_sessions_alive(self):
for session in self.authenticated_sessions:
session.touch()
def _failed_login(self, host):
def remove_ban(host):
del self.failed_login_attempts[host]
if host in self.failed_login_attempts:
self.failed_login_attempts[host] += 1
reactor.callLater(3600, remove_ban, host)
else:
self.failed_login_attempts[host] = 1
    @POST('^/api/v1/login')
    def login(self, request):
        """
        POST /api/v1/login

        Authenticate the client against the configured username/password
        and register its session. Hosts are locked out after 7 failed
        attempts (see _failed_login). When "localhost" is whitelisted in
        authenticated_sessions, every login succeeds without credentials.
        """
        request.setHeader('content-type', "application/json")
        if "localhost" in self.authenticated_sessions:
            return json.dumps({"success": True})
        if request.getHost().host in self.failed_login_attempts and \
                self.failed_login_attempts[request.getHost().host] >= 7:
            return json.dumps({"success": False, "reason": "too many attempts"})
        try:
            # NOTE(review): plain `==` comparison leaks timing
            # information; consider hmac.compare_digest for the password.
            if request.args["username"][0] == self.username and request.args["password"][0] == self.password:
                self.authenticated_sessions.append(request.getSession())
                # Successful login clears the failure counter.
                if request.getHost().host in self.failed_login_attempts:
                    del self.failed_login_attempts[request.getHost().host]
                return json.dumps({"success": True})
            else:
                raise Exception("Invalid credentials")
        except Exception:
            # Bad credentials or malformed args both count as a failure.
            self._failed_login(request.getHost().host)
            return json.dumps({"success": False, "reason": "invalid username or password"})
    @GET('^/api/v1/get_image')
    @authenticated
    def get_image(self, request):
        """
        GET /api/v1/get_image

        Serve an image by its 40-character hex digest, looking in the
        filemap and then the cache folder. If the file is missing locally
        and a "guid" parameter is given, try to fetch the image from that
        peer (when a connection to it exists) before responding. Responds
        404 when the image cannot be produced.
        """
        @defer.inlineCallbacks
        def _showImage(resp=None):
            @defer.inlineCallbacks
            def _setContentDispositionAndSend(file_path, extension, content_type):
                # NOTE(review): the request is not finished after the
                # transfer completes here — confirm whether FileSender or
                # a caller closes it.
                request.setHeader('content-disposition', 'filename="%s.%s"' % (file_path, extension))
                request.setHeader('content-type', content_type)
                request.setHeader('cache-control', 'max-age=604800')
                f = open(file_path, "rb")
                yield FileSender().beginFileTransfer(f, request)
                f.close()
                defer.returnValue(0)
            if os.path.exists(image_path):
                yield _setContentDispositionAndSend(image_path, "jpg", "image/jpeg")
            else:
                request.setResponseCode(http.NOT_FOUND)
                request.write("No such image '%s'" % request.path)
                request.finish()
        if "hash" in request.args and len(request.args["hash"][0]) == 40:
            if self.db.filemap.get_file(request.args["hash"][0]) is not None:
                image_path = self.db.filemap.get_file(request.args["hash"][0])
            else:
                image_path = os.path.join(DATA_FOLDER, "cache", request.args["hash"][0])
            if not os.path.exists(image_path) and "guid" in request.args:
                # Look for an open connection to the peer that has it.
                node = None
                for connection in self.protocol.values():
                    if connection.handler.node is not None and \
                            connection.handler.node.id == unhexlify(request.args["guid"][0]):
                        node = connection.handler.node
                self.mserver.get_image(node, unhexlify(request.args["hash"][0])).addCallback(_showImage)
                if node is None:
                    # Not connected: respond immediately (404 path).
                    _showImage()
            else:
                _showImage()
        else:
            request.write(NoResource().render(request))
            request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/profile')
@authenticated
def get_profile(self, request):
def parse_profile(profile, temp_handle=None):
if profile is not None:
profile_json = {
"profile": {
"name": profile.name,
"location": str(CountryCode.Name(profile.location)),
"public_key": profile.guid_key.public_key.encode("hex"),
"nsfw": profile.nsfw,
"vendor": profile.vendor,
"moderator": profile.moderator,
"moderation_fee": round(profile.moderation_fee, 2),
"handle": profile.handle,
"about": profile.about,
"short_description": profile.short_description[0:160],
"website": profile.website,
"email": profile.email,
"primary_color": profile.primary_color,
"secondary_color": profile.secondary_color,
"background_color": profile.background_color,
"text_color": profile.text_color,
"pgp_key": profile.pgp_key.public_key,
"avatar_hash": profile.avatar_hash.encode("hex"),
"header_hash": profile.header_hash.encode("hex"),
"social_accounts": {},
"last_modified": profile.last_modified
}
}
if temp_handle:
profile_json["profile"]["temp_handle"] = temp_handle
if "guid" in request.args:
profile_json["profile"]["guid"] = request.args["guid"][0]
else:
profile_json["profile"]["guid"] = self.keychain.guid.encode("hex")
for account in profile.social:
profile_json["profile"]["social_accounts"][str(
objects.Profile.SocialAccount.SocialType.Name(account.type)).lower()] = {
"username": account.username,
"proof_url": account.proof_url
}
if (profile.handle is not "" and "(unconfirmed)" not in profile.handle and
not blockchainid.validate(profile.handle, profile_json["profile"]["guid"])):
profile_json["profile"]["handle"] = ""
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(profile_json), indent=4))
request.finish()
else:
request.write(json.dumps({}))
request.finish()
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.get_profile(node).addCallback(parse_profile)
else:
request.write(json.dumps({}))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
else:
p = Profile(self.db).get()
if not p.HasField("guid_key"):
request.write(json.dumps({}))
request.finish()
else:
temp_handle = self.db.profile.get_temp_handle()
parse_profile(p, None if temp_handle == "" else temp_handle)
return server.NOT_DONE_YET
    @GET('^/api/v1/get_listings')
    @authenticated
    def get_listings(self, request):
        """
        GET /api/v1/get_listings

        Return listings as JSON. With a "guid" parameter the listings are
        fetched from that remote node; otherwise the local serialized
        Listings protobuf is parsed and returned. Empty JSON object when
        nothing is available.
        """
        def parse_listings(listings):
            if listings is not None:
                response = {"listings": []}
                for l in listings.listing:
                    listing_json = {
                        "title": l.title,
                        "contract_hash": l.contract_hash.encode("hex"),
                        "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                        "category": l.category,
                        "price": l.price,
                        "currency_code": l.currency_code,
                        "nsfw": l.nsfw,
                        "origin": str(CountryCode.Name(l.origin)),
                        "ships_to": [],
                        "last_modified": l.last_modified,
                        "pinned": l.pinned,
                        "hidden": l.hidden
                    }
                    # 0 is the protobuf default; only name non-default types.
                    if l.contract_type != 0:
                        listing_json["contract_type"] = str(objects.Listings.ContractType.Name(l.contract_type))
                    for country in l.ships_to:
                        listing_json["ships_to"].append(str(CountryCode.Name(country)))
                    response["listings"].append(listing_json)
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "guid" in request.args:
            def get_node(node):
                if node is not None:
                    self.mserver.get_listings(node).addCallback(parse_listings)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            ser = self.db.listings.get_proto()
            if ser is not None:
                l = objects.Listings()
                l.ParseFromString(ser)
                parse_listings(l)
            else:
                parse_listings(None)
        return server.NOT_DONE_YET
    @GET('^/api/v1/get_followers')
    @authenticated
    def get_followers(self, request):
        """
        Return a page of a node's followers as JSON.

        Accepts optional ``start`` (pagination offset) and ``guid`` (remote
        node) query arguments; without ``guid`` the local follower list is
        used. Returns NOT_DONE_YET; the response is written asynchronously.
        """
        def parse_followers(followers):
            # `followers` is a (Followers protobuf | None, total count | None)
            # tuple; a None protobuf means the data was unavailable.
            if followers[0] is not None:
                response = {"followers": []}
                for f in followers[0].followers:
                    follower_json = {
                        "guid": f.guid.encode("hex"),
                        "handle": f.metadata.handle,
                        "name": f.metadata.name,
                        "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                        # truncate to keep the payload bounded
                        "short_description": f.metadata.short_description[0:160],
                        "nsfw": f.metadata.nsfw
                    }
                    response["followers"].append(follower_json)
                if followers[1] is not None:
                    response["count"] = followers[1]
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        start = 0
        if "start" in request.args:
            start = int(request.args["start"][0])
        if "guid" in request.args:
            def get_node(node):
                # DHT resolution callback; None means the guid was not found.
                if node is not None:
                    self.mserver.get_followers(node, start).addCallback(parse_followers)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            # Local path: (serialized protobuf, count) straight from the db.
            ser = self.db.follow.get_followers(start)
            if ser[0] is not None:
                f = objects.Followers()
                f.ParseFromString(ser[0])
                parse_followers((f, ser[1]))
            else:
                parse_followers((None, 0))
        return server.NOT_DONE_YET
    @GET('^/api/v1/get_following')
    @authenticated
    def get_following(self, request):
        """
        Return the list of users a node is following as JSON.

        With a ``guid`` query argument the remote node is resolved and queried;
        otherwise the local following list is read from the database. Returns
        NOT_DONE_YET; the response is written asynchronously.
        """
        def parse_following(following):
            # `following` is a protobuf Following message, or None when the
            # data was unavailable (reply with an empty JSON object).
            if following is not None:
                response = {"following": []}
                for f in following.users:
                    user_json = {
                        "guid": f.guid.encode("hex"),
                        "handle": f.metadata.handle,
                        "name": f.metadata.name,
                        "avatar_hash": f.metadata.avatar_hash.encode("hex"),
                        # truncate to keep the payload bounded
                        "short_description": f.metadata.short_description[0:160],
                        "nsfw": f.metadata.nsfw
                    }
                    response["following"].append(user_json)
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(response), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        if "guid" in request.args:
            def get_node(node):
                # DHT resolution callback; None means the guid was not found.
                if node is not None:
                    self.mserver.get_following(node).addCallback(parse_following)
                else:
                    request.write(json.dumps({}))
                    request.finish()
            self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
        else:
            # Local path: deserialize our own stored Following protobuf.
            ser = self.db.follow.get_following()
            if ser is not None:
                f = objects.Following()
                f.ParseFromString(ser)
                parse_following(f)
            else:
                parse_following(None)
        return server.NOT_DONE_YET
@POST('^/api/v1/follow')
@authenticated
def follow(self, request):
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.follow(node)
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
return server.NOT_DONE_YET
@POST('^/api/v1/unfollow')
@authenticated
def unfollow(self, request):
if "guid" in request.args:
def get_node(node):
if node is not None:
self.mserver.unfollow(node)
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": "could not resolve guid"}, indent=4))
request.finish()
self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
return server.NOT_DONE_YET
    # pylint: disable=R0201
    @POST('^/api/v1/profile')
    @authenticated
    def update_profile(self, request):
        """
        Create or update this node's profile from the posted form fields.

        On first use, ``name`` and ``location`` are required and a signed
        guid key is attached. Fields posted in `request.args` are merged
        into the stored profile; unknown fields are ignored. Writes a
        success/failure JSON response and returns NOT_DONE_YET.
        """
        try:
            p = Profile(self.db)
            # A profile may only be created (no guid_key yet) when both
            # name and location are supplied; updates have no such gate.
            can_update_profile = (p.get().HasField("guid_key") or
                                  ("name" in request.args and
                                   "location" in request.args))
            if not can_update_profile:
                request_dict = {
                    "success": False,
                    "reason": "name or location not included"
                }
                request.write(json.dumps(request_dict, indent=4))
                request.finish()
                return False
            # `u` accumulates new field values and is merged via p.update();
            # some fields are instead written directly onto p.profile below.
            u = objects.Profile()
            if "name" in request.args:
                u.name = request.args["name"][0]
            if "location" in request.args:
                # This needs to be formatted. Either here or from the UI.
                u.location = CountryCode.Value(request.args["location"][0].upper())
            if "handle" in request.args:
                # A handle is only stored once it validates against our guid
                # on the blockchain resolver; until then it is kept as a
                # temporary (unconfirmed) handle in the database.
                if blockchainid.validate(request.args["handle"][0], self.keychain.guid.encode("hex")):
                    u.handle = request.args["handle"][0]
                    self.db.profile.set_temp_handle("")
                else:
                    u.handle = ""
                    self.db.profile.set_temp_handle(request.args["handle"][0])
            if "about" in request.args:
                u.about = request.args["about"][0]
            if "short_description" in request.args:
                u.short_description = request.args["short_description"][0]
            if "nsfw" in request.args:
                p.profile.nsfw = str_to_bool(request.args["nsfw"][0])
            if "vendor" in request.args:
                p.profile.vendor = str_to_bool(request.args["vendor"][0])
            if "moderator" in request.args:
                p.profile.moderator = str_to_bool(request.args["moderator"][0])
            if "moderation_fee" in request.args:
                p.profile.moderation_fee = round(float(request.args["moderation_fee"][0]), 2)
            if "website" in request.args:
                u.website = request.args["website"][0]
            if "email" in request.args:
                u.email = request.args["email"][0]
            if "primary_color" in request.args:
                p.profile.primary_color = int(request.args["primary_color"][0])
            if "secondary_color" in request.args:
                p.profile.secondary_color = int(request.args["secondary_color"][0])
            if "background_color" in request.args:
                p.profile.background_color = int(request.args["background_color"][0])
            if "text_color" in request.args:
                p.profile.text_color = int(request.args["text_color"][0])
            if "avatar" in request.args:
                u.avatar_hash = unhexlify(request.args["avatar"][0])
            if "header" in request.args:
                u.header_hash = unhexlify(request.args["header"][0])
            if "pgp_key" in request.args and "signature" in request.args:
                p.add_pgp_key(request.args["pgp_key"][0], request.args["signature"][0],
                              self.keychain.guid.encode("hex"))
            if not p.get().HasField("guid_key"):
                # First-time profile: attach our verify key signed by our
                # signing key so peers can authenticate the profile.
                key = u.PublicKey()
                key.public_key = self.keychain.verify_key.encode()
                key.signature = self.keychain.signing_key.sign(key.public_key)[:64]
                u.guid_key.MergeFrom(key)
            u.last_modified = int(time.time())
            p.update(u)
            request.write(json.dumps({"success": True}))
            request.finish()
            # Keep the DHT node's vendor flag in sync with the profile.
            self.kserver.node.vendor = p.get().vendor
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@POST('^/api/v1/social_accounts')
@authenticated
def add_social_account(self, request):
try:
p = Profile(self.db)
if "account_type" in request.args and "username" in request.args:
p.add_social_account(request.args["account_type"][0],
request.args["username"][0],
request.args["proof"][0] if
"proof" in request.args else None)
else:
raise Exception("Missing required fields")
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/social_accounts')
@authenticated
def delete_social_account(self, request):
try:
p = Profile(self.db)
if "account_type" in request.args:
p.remove_social_account(request.args["account_type"][0])
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
    @GET('^/api/v1/contracts')
    @authenticated
    def get_contract(self, request):
        """
        Fetch a contract by its 40-hex-character id.

        With a ``guid`` argument the contract is fetched from that remote
        node; otherwise it is read from the local file map. Writes the
        contract JSON (or ``{}`` on any failure) and returns NOT_DONE_YET.
        """
        def parse_contract(contract):
            # `contract` is an already-deserialized dict, or None on failure.
            if contract is not None:
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(sanitize_html(contract), indent=4))
                request.finish()
            else:
                request.write(json.dumps({}))
                request.finish()
        # Both id and guid are hex digests and must be exactly 40 chars.
        if "id" in request.args and len(request.args["id"][0]) == 40:
            if "guid" in request.args and len(request.args["guid"][0]) == 40:
                def get_node(node):
                    # DHT resolution callback; None means guid not found.
                    if node is not None:
                        self.mserver.get_contract(node, unhexlify(request.args["id"][0]))\
                            .addCallback(parse_contract)
                    else:
                        request.write(json.dumps({}))
                        request.finish()
                self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
            else:
                # Local path: filemap maps the contract id to a file on disk.
                # OrderedDict preserves key order so the contract hash stays stable.
                try:
                    with open(self.db.filemap.get_file(request.args["id"][0]), "r") as filename:
                        contract = json.loads(filename.read(), object_pairs_hook=OrderedDict)
                    parse_contract(contract)
                except Exception:
                    parse_contract(None)
        else:
            request.write(json.dumps({}))
            request.finish()
        return server.NOT_DONE_YET
@POST('^/api/v1/contracts')
@authenticated
def set_contract(self, request):
try:
if "options" in request.args:
options = {}
for option in request.args["options"]:
options[option] = request.args[option]
keywords = None
if "keywords" in request.args:
keywords = []
for keyword in request.args["keywords"]:
keywords.append(keyword)
if len(keywords) > 10:
raise Exception("Too many keywords")
if "contract_id" in request.args:
c = Contract(self.db, hash_value=unhexlify(request.args["contract_id"][0]),
testnet=self.protocol.testnet)
else:
c = Contract(self.db, testnet=self.protocol.testnet)
c.create(
str_to_bool(request.args["pinned"][0]) if "pinned" in request.args else False,
int(request.args["max_quantity"][0]) if "max_quantity" in request.args else 999999,
str_to_bool(request.args["hidden"][0]) if "hidden" in request.args else False,
str(request.args["expiration_date"][0]),
request.args["metadata_category"][0],
request.args["title"][0],
request.args["description"][0],
request.args["currency_code"][0],
request.args["price"][0],
request.args["process_time"][0],
str_to_bool(request.args["nsfw"][0]),
shipping_origin=request.args["shipping_origin"][0] if "shipping_origin" in request.args else None,
shipping_regions=request.args["ships_to"] if "ships_to" in request.args else None,
est_delivery_domestic=request.args["est_delivery_domestic"][0]
if "est_delivery_domestic" in request.args else None,
est_delivery_international=request.args["est_delivery_international"][0]
if "est_delivery_international" in request.args else None,
terms_conditions=request.args["terms_conditions"][0]
if request.args["terms_conditions"][0] is not "" else None,
returns=request.args["returns"][0]
if request.args["returns"][0] is not "" else None,
shipping_currency_code=request.args["shipping_currency_code"][0],
shipping_domestic=request.args["shipping_domestic"][0],
shipping_international=request.args["shipping_international"][0],
keywords=keywords,
category=request.args["category"][0]
if request.args["category"][0] is not "" else None,
condition=request.args["condition"][0]
if request.args["condition"][0] is not "" else None,
sku=request.args["sku"][0] if request.args["sku"][0] is not "" else None,
images=request.args["images"],
free_shipping=str_to_bool(request.args["free_shipping"][0]),
options=options if "options" in request.args else None,
moderators=request.args["moderators"] if "moderators" in request.args else None,
contract_id=request.args["contract_id"][0] if "contract_id" in request.args else None,
)
for keyword in request.args["keywords"]:
if keyword != "":
self.kserver.set(digest(keyword.lower()), unhexlify(c.get_contract_id()),
self.kserver.node.getProto().SerializeToString())
request.write(json.dumps({"success": True, "id": c.get_contract_id()}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
    @DELETE('^/api/v1/contracts')
    @authenticated
    def delete_contract(self, request):
        """
        Delete a local contract by ``id`` and withdraw its keywords from the
        DHT.

        Pass ``delete_images`` to also remove the listing's images. Writes a
        success/failure JSON response and returns NOT_DONE_YET.
        """
        try:
            if "id" in request.args:
                file_path = self.db.filemap.get_file(request.args["id"][0])
                with open(file_path, 'r') as filename:
                    contract = json.load(filename, object_pairs_hook=OrderedDict)
                c = Contract(self.db, contract=contract)
                # Un-publish every keyword this listing was advertised under,
                # signing the deletion with our key so peers accept it.
                if "keywords" in c.contract["vendor_offer"]["listing"]["item"]:
                    for keyword in c.contract["vendor_offer"]["listing"]["item"]["keywords"]:
                        if keyword != "":
                            # DHT keys are byte strings; JSON gives us unicode.
                            if isinstance(keyword, unicode):
                                keyword = keyword.encode('utf8')
                            self.kserver.delete(keyword.lower(), unhexlify(c.get_contract_id()),
                                                self.keychain.signing_key.sign(
                                                    unhexlify(c.get_contract_id()))[:64])
                if "delete_images" in request.args:
                    c.delete(delete_images=True)
                else:
                    c.delete()
            request.write(json.dumps({"success": True}))
            request.finish()
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
    @GET('^/api/v1/shutdown')
    def shutdown(self, request):
        """
        Cleanly shut the server down.

        Allowed for authenticated sessions; otherwise a 401 is returned.
        Before stopping the reactor, known vendors are persisted and UPnP
        port mappings are removed.
        """
        session = request.getSession()
        # NOTE(review): getHost() returns the server-side address; presumably
        # this is meant to always allow shutdown over the loopback interface —
        # confirm against the intended access policy.
        if session not in self.authenticated_sessions and request.getHost().host != "127.0.0.1":
            session.expire()
            request.setResponseCode(401)
            request.write('<html><body><div><span style="color:red">Authorization Error</span></div>'
                          '<h2>Permission Denied</h2></body></html>')
            request.finish()
            return server.NOT_DONE_YET
        else:
            # Persist the vendors we have learned about so they survive restart.
            for vendor in self.protocol.vendors.values():
                self.db.vendors.save_vendor(vendor.id.encode("hex"), vendor.getProto().SerializeToString())
            PortMapper().clean_my_mappings(self.kserver.node.port)
            self.protocol.shutdown()
            reactor.stop()
            return
@POST('^/api/v1/make_moderator')
@authenticated
def make_moderator(self, request):
try:
self.mserver.make_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/unmake_moderator')
@authenticated
def unmake_moderator(self, request):
try:
self.mserver.unmake_moderator()
request.write(json.dumps({"success": True}))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
    @POST('^/api/v1/purchase_contract')
    @authenticated
    def purchase_contract(self, request):
        """
        Purchase a listing: build the buyer side of the contract, send it to
        the vendor, and on acceptance begin watching the payment address for
        funding.

        Responds with the payment address, amount, and order id on success,
        or a failure reason. Returns NOT_DONE_YET (async response).
        """
        try:
            def handle_response(resp, contract):
                # Called with the vendor's reply. `payment` and `c` below are
                # closed over from the enclosing scope; they are assigned
                # before the deferred can fire.
                if resp:
                    # Vendor accepted: watch the blockchain for the buyer's
                    # funding transaction before proceeding.
                    contract.await_funding(self.mserver.protocol.get_notification_listener(),
                                           self.protocol.blockchain, resp)
                    request.write(json.dumps({"success": True, "payment_address": payment[0],
                                              "amount": payment[1],
                                              "order_id": c.get_order_id()},
                                             indent=4))
                    request.finish()
                else:
                    request.write(json.dumps({"success": False, "reason": "vendor rejected contract"}, indent=4))
                    request.finish()
            options = None
            if "options" in request.args:
                options = {}
                for option in request.args["options"]:
                    options[option] = request.args[option][0]
            c = Contract(self.db, hash_value=unhexlify(request.args["id"][0]), testnet=self.protocol.testnet)
            # Attach the buyer's purchase details; returns the
            # (payment address, amount) pair used in the success response.
            payment = c.\
                add_purchase_info(int(request.args["quantity"][0]),
                                  request.args["refund_address"][0],
                                  request.args["ship_to"][0]
                                  if "ship_to" in request.args else None,
                                  request.args["address"][0]
                                  if "address" in request.args else None,
                                  request.args["city"][0]
                                  if "city" in request.args else None,
                                  request.args["state"][0]
                                  if "state" in request.args else None,
                                  request.args["postal_code"][0]
                                  if "postal_code" in request.args else None,
                                  request.args["country"][0]
                                  if "country" in request.args else None,
                                  request.args["moderator"][0] if "moderator" in request.args else None,
                                  options,
                                  request.args["alternate_contact"][0].decode("utf8")
                                  if "alternate_contact" in request.args else None)
            def get_node(node):
                # DHT resolution callback for the vendor's guid.
                if node is not None:
                    self.mserver.purchase(node, c).addCallback(handle_response, c)
                else:
                    request.write(json.dumps({"success": False, "reason": "unable to reach vendor"}, indent=4))
                    request.finish()
            vendor_guid = unhexlify(c.contract["vendor_offer"]["listing"]["id"]["guid"])
            self.kserver.resolve(vendor_guid).addCallback(get_node)
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
    @POST('^/api/v1/confirm_order')
    @authenticated
    def confirm_order(self, request):
        """
        Vendor-side order confirmation: load the in-progress contract by
        order ``id``, attach the order-confirmation block (payout address,
        shipping details, etc.), and send the confirmation to the buyer.

        Writes a success/failure JSON response and returns NOT_DONE_YET.
        """
        try:
            def respond(success):
                # `success` is True, or an error string explaining the failure.
                if success is True:
                    request.write(json.dumps({"success": True}))
                    request.finish()
                else:
                    request.write(json.dumps({"success": False, "reason": success}))
                    request.finish()
            file_name = request.args["id"][0] + ".json"
            file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", file_name)
            # OrderedDict preserves key order so the contract hash stays stable.
            with open(file_path, 'r') as filename:
                order = json.load(filename, object_pairs_hook=OrderedDict)
            c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
            # Only add the confirmation once; re-confirming just re-sends it.
            if "vendor_order_confirmation" not in c.contract:
                c.add_order_confirmation(self.protocol.blockchain,
                                         request.args["payout_address"][0],
                                         comments=request.args["comments"][0]
                                         if "comments" in request.args else None,
                                         shipper=request.args["shipper"][0]
                                         if "shipper" in request.args else None,
                                         tracking_number=request.args["tracking_number"][0]
                                         if "tracking_number" in request.args else None,
                                         est_delivery=request.args["est_delivery"][0]
                                         if "est_delivery" in request.args else None,
                                         url=request.args["url"][0]
                                         if "url" in request.args else None,
                                         password=request.args["password"][0]
                                         if "password" in request.args else None)
            guid = c.contract["buyer_order"]["order"]["id"]["guid"]
            self.mserver.confirm_order(guid, c).addCallback(respond)
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
@POST('^/api/v1/upload_image')
@authenticated
def upload_image(self, request):
try:
ret = []
if "image" in request.args:
for image in request.args["image"]:
img = image.decode('base64')
hash_value = digest(img).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "media", hash_value), 'wb') as outfile:
outfile.write(img)
self.db.filemap.insert(hash_value, os.path.join("store", "media", hash_value))
ret.append(hash_value)
elif "avatar" in request.args:
avi = request.args["avatar"][0].decode("base64")
hash_value = digest(avi).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "avatar"), 'wb') as outfile:
outfile.write(avi)
self.db.filemap.insert(hash_value, os.path.join("store", "avatar"))
ret.append(hash_value)
elif "header" in request.args:
hdr = request.args["header"][0].decode("base64")
hash_value = digest(hdr).encode("hex")
with open(os.path.join(DATA_FOLDER, "store", "header"), 'wb') as outfile:
outfile.write(hdr)
self.db.filemap.insert(hash_value, os.path.join("store", "header"))
ret.append(hash_value)
request.write(json.dumps({"success": True, "image_hashes": ret}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/complete_order')
@authenticated
def complete_order(self, request):
def respond(success):
if success is True:
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": success}))
request.finish()
file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", request.args["id"][0] + ".json")
if not os.path.exists(file_path):
file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", request.args["id"][0] + ".json")
with open(file_path, 'r') as filename:
order = json.load(filename, object_pairs_hook=OrderedDict)
c = Contract(self.db, contract=order, testnet=self.protocol.testnet)
if "buyer_receipt" not in c.contract:
c.add_receipt(True,
self.protocol.blockchain,
feedback=request.args["feedback"][0] if "feedback" in request.args else None,
quality=request.args["quality"][0] if "quality" in request.args else None,
description=request.args["description"][0] if "description" in request.args else None,
delivery_time=request.args["delivery_time"][0]
if "delivery_time" in request.args else None,
customer_service=request.args["customer_service"][0]
if "customer_service" in request.args else None,
review=request.args["review"][0] if "review" in request.args else "",
anonymous=str_to_bool(request.args["anonymous"]) if "anonymous" in request.args else True)
guid = c.contract["vendor_offer"]["listing"]["id"]["guid"]
self.mserver.complete_order(guid, c).addCallback(respond)
return server.NOT_DONE_YET
    @POST('^/api/v1/settings')
    @authenticated
    def set_settings(self, request):
        """
        Persist the node's settings from the posted form fields.

        Also updates the config-file values for the custom libbitcoin server
        and the blockchain-id resolver, and pushes moderator changes out to
        existing listings. Writes a success/failure JSON response and returns
        NOT_DONE_YET.
        """
        try:
            settings = self.db.settings
            # Fall back to the default RESOLVER when none (or an empty one)
            # was posted.
            resolver = RESOLVER if "resolver" not in request.args or request.args["resolver"][0] == "" \
                else request.args["resolver"][0]
            # A non-empty, non-"null" libbitcoin server overrides the default
            # in the config file; anything else clears a previous override.
            if "libbitcoin_server" in request.args and \
                            request.args["libbitcoin_server"][0] != "" and \
                            request.args["libbitcoin_server"][0] != "null":
                if self.protocol.testnet:
                    set_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom",
                              request.args["libbitcoin_server"][0])
                else:
                    set_value("LIBBITCOIN_SERVERS", "mainnet_server_custom",
                              request.args["libbitcoin_server"][0])
            else:
                if self.protocol.testnet:
                    if get_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom"):
                        delete_value("LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom")
                else:
                    if get_value("LIBBITCOIN_SERVERS", "mainnet_server_custom"):
                        delete_value("LIBBITCOIN_SERVERS", "mainnet_server_custom")
            if resolver != get_value("CONSTANTS", "RESOLVER"):
                set_value("CONSTANTS", "RESOLVER", resolver)
            # Default the optional SMTP fields so settings.update() below can
            # index them unconditionally.
            if "smtp_notifications" not in request.args:
                request.args["smtp_notifications"] = ['false']
            smtp_attrs = ["smtp_server", "smtp_sender", "smtp_recipient", "smtp_username", "smtp_password"]
            for smtp_attr in smtp_attrs:
                if smtp_attr not in request.args:
                    request.args[smtp_attr] = ['']
            settings_list = settings.get()
            # Diff the posted moderator list against the stored one
            # (column 11) and notify listings only when it actually changed.
            if "moderators" in request.args and settings_list is not None:
                mod_json = settings_list[11]
                if mod_json != "":
                    prev_mods = json.loads(mod_json)
                    current_mods = request.args["moderators"]
                    to_add = list(set(current_mods) - set(prev_mods))
                    to_remove = list(set(prev_mods) - set(current_mods))
                    if len(to_remove) > 0 or len(to_add) > 0:
                        self.mserver.update_moderators_on_listings(request.args["moderators"])
            settings.update(
                request.args["refund_address"][0],
                request.args["currency_code"][0],
                request.args["country"][0],
                request.args["language"][0],
                request.args["time_zone"][0],
                1 if str_to_bool(request.args["notifications"][0]) else 0,
                json.dumps(request.args["shipping_addresses"] if request.args["shipping_addresses"] != "" else []),
                json.dumps(request.args["blocked"] if request.args["blocked"] != "" else []),
                request.args["terms_conditions"][0],
                request.args["refund_policy"][0],
                json.dumps(request.args["moderators"] if request.args["moderators"] != "" else []),
                1 if str_to_bool(request.args["smtp_notifications"][0]) else 0,
                request.args["smtp_server"][0],
                request.args["smtp_sender"][0],
                request.args["smtp_recipient"][0],
                request.args["smtp_username"][0],
                request.args["smtp_password"][0]
            )
            request.write(json.dumps({"success": True}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
        except Exception, e:
            request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
            request.finish()
            return server.NOT_DONE_YET
    @GET('^/api/v1/settings')
    @authenticated
    def get_settings(self, request):
        """
        Return the node's settings as JSON (``{}`` when none are stored yet).

        `settings` is the row returned by the settings table; fields are
        addressed by column index below. Moderator guids (column 11) are
        expanded into full moderator records from the moderators table.
        """
        settings = self.db.settings.get()
        if settings is None:
            request.write(json.dumps({}, indent=4))
            request.finish()
        else:
            # Map the detected NAT type to a user-facing connection label.
            if self.protocol.nat_type == objects.FULL_CONE:
                nat_type = "Open"
            elif self.protocol.nat_type == objects.RESTRICTED:
                nat_type = "Restricted"
            else:
                nat_type = "Severely Restricted"
            settings_json = {
                "refund_address": settings[1],
                "currency_code": settings[2],
                "country": settings[3],
                "language": settings[4],
                "time_zone": settings[5],
                "notifications": True if settings[6] == 1 else False,
                "shipping_addresses": json.loads(settings[7]),
                "blocked_guids": json.loads(settings[8]),
                # Custom libbitcoin server override from the config file,
                # testnet or mainnet depending on the running network.
                "libbitcoin_server": get_value(
                    "LIBBITCOIN_SERVERS_TESTNET", "testnet_server_custom")if self.protocol.testnet else get_value(
                    "LIBBITCOIN_SERVERS", "mainnet_server_custom"),
                "seed": KeyChain(self.db).signing_key.encode(encoder=nacl.encoding.HexEncoder),
                "terms_conditions": "" if settings[9] is None else settings[9],
                "refund_policy": "" if settings[10] is None else settings[10],
                "resolver": get_value("CONSTANTS", "RESOLVER"),
                "network_connection": nat_type,
                "transaction_fee": TRANSACTION_FEE,
                "smtp_notifications": True if settings[14] == 1 else False,
                "smtp_server": settings[15],
                "smtp_sender": settings[16],
                "smtp_recipient": settings[17],
                "smtp_username": settings[18],
                "smtp_password": settings[19],
                "version": SERVER_VERSION
            }
            mods = []
            # Expand stored moderator guids into full records; a malformed
            # moderator list is ignored rather than failing the whole request.
            try:
                for guid in json.loads(settings[11]):
                    info = self.db.moderators.get_moderator(guid)
                    if info is not None:
                        m = {
                            "guid": guid,
                            "handle": info[4],
                            "name": info[5],
                            "avatar_hash": info[7].encode("hex"),
                            "short_description": info[6][0:160],
                            "fee": info[8]
                        }
                        mods.append(m)
            except Exception:
                pass
            settings_json["moderators"] = mods
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(sanitize_html(settings_json), indent=4))
            request.finish()
        return server.NOT_DONE_YET
@GET('^/api/v1/connected_peers')
@authenticated
def get_connected_peers(self, request):
request.setHeader('content-type', "application/json")
peers = self.protocol.keys()
resp = {
"num_peers": len(peers),
"peers": peers
}
request.write(json.dumps(sanitize_html(resp), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/routing_table')
@authenticated
def get_routing_table(self, request):
nodes = []
for bucket in self.kserver.protocol.router.buckets:
for node in bucket.nodes.values():
n = {
"guid": node.id.encode("hex"),
"ip": node.ip,
"port": node.port,
"vendor": node.vendor,
"nat_type": objects.NATType.Name(node.nat_type)
}
nodes.append(n)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(nodes), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_notifications')
@authenticated
def get_notifications(self, request):
limit = int(request.args["limit"][0]) if "limit" in request.args else 20
start = request.args["start"][0] if "start" in request.args else ""
notifications = self.db.notifications.get_notifications(start, limit)
notification_dict = {
"unread": self.db.notifications.get_unread_count(),
"notifications": []
}
for n in notifications[::-1]:
notification_json = {
"id": n[0],
"guid": n[1],
"handle": n[2],
"type": n[3],
"order_id": n[4],
"title": n[5],
"timestamp": n[6],
"image_hash": n[7].encode("hex"),
"read": False if n[8] == 0 else True
}
notification_dict["notifications"].append(notification_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(notification_dict), indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_notification_as_read')
@authenticated
def mark_notification_as_read(self, request):
try:
for notif_id in request.args["id"]:
self.db.notifications.mark_as_read(notif_id)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/broadcast')
@authenticated
def broadcast(self, request):
try:
def get_response(num):
request.write(json.dumps({"success": True, "peers reached": num}, indent=4))
request.finish()
self.mserver.broadcast(request.args["message"][0]).addCallback(get_response)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_messages')
@authenticated
def get_chat_messages(self, request):
start = request.args["start"][0] if "start" in request.args else None
messages = self.db.messages.get_messages(request.args["guid"][0], "CHAT", start)
message_list = []
for m in messages[::-1]:
message_json = {
"id": m[11],
"guid": m[0],
"handle": m[1],
"message": m[5],
"timestamp": m[6],
"avatar_hash": m[7].encode("hex"),
"outgoing": False if m[9] == 0 else True,
"read": False if m[10] == 0 else True
}
message_list.append(message_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(message_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_chat_conversations')
@authenticated
def get_chat_conversations(self, request):
messages = self.db.messages.get_conversations()
request.setHeader('content-type', "application/json")
request.write(json.dumps(messages, indent=4).encode("utf-8"))
request.finish()
return server.NOT_DONE_YET
@DELETE('^/api/v1/chat_conversation')
@authenticated
def delete_conversations(self, request):
try:
self.db.messages.delete_messages(request.args["guid"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_chat_message_as_read')
@authenticated
def mark_chat_message_as_read(self, request):
try:
self.db.messages.mark_as_read(request.args["guid"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_sales')
@authenticated
def get_sales(self, request):
if "status" in request.args:
sales = self.db.sales.get_by_status(request.args["status"][0])
else:
sales = self.db.sales.get_all()
sales_list = []
for sale in sales:
sale_json = {
"order_id": sale[0],
"title": sale[1],
"description": sale[2],
"timestamp": sale[3],
"btc_total": sale[4],
"status": sale[5],
"thumbnail_hash": sale[6],
"buyer": sale[7],
"contract_type": sale[8],
"unread": sale[9],
"status_changed": False if sale[10] == 0 else True
}
sales_list.append(sale_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(sales_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_purchases')
@authenticated
def get_purchases(self, request):
purchases = self.db.purchases.get_all()
purchases_list = []
for purchase in purchases:
purchase_json = {
"order_id": purchase[0],
"title": purchase[1],
"description": purchase[2],
"timestamp": purchase[3],
"btc_total": purchase[4],
"status": purchase[5],
"thumbnail_hash": purchase[6],
"vendor": purchase[7],
"contract_type": purchase[8],
"unread": purchase[9],
"status_changed": False if purchase[10] == 0 else True
}
purchases_list.append(purchase_json)
request.setHeader('content-type', "application/json")
request.write(json.dumps(sanitize_html(purchases_list), indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/check_for_payment')
@authenticated
def check_for_payment(self, request):
if not self.protocol.blockchain.connected:
request.write(json.dumps({"success": False, "reason": "libbitcoin server offline"}, indent=4))
request.finish()
return server.NOT_DONE_YET
try:
check_order_for_payment(request.args["order_id"][0], self.db,
self.protocol.blockchain,
self.mserver.protocol.get_notification_listener(),
self.protocol.testnet)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_order')
@authenticated
def get_order(self, request):
    """Return the full contract JSON for one order.

    The order file is searched for in each purchase/sale/case directory in
    turn; the directory that matches determines which table's status is
    read and which table's "status changed" flag is cleared. When the
    blockchain connection is up, the payment address's transaction history
    is appended to the response before it is written.
    """
    #TODO: if this is either a funded direct payment sale or complete moderated sale but
    #TODO: the payout tx has not hit the blockchain, rebroadcast.
    filename = request.args["order_id"][0] + ".json"
    # Probe purchases first, then the vendor's own contracts, then cases.
    if os.path.exists(os.path.join(DATA_FOLDER, "purchases", "unfunded", filename)):
        file_path = os.path.join(DATA_FOLDER, "purchases", "unfunded", filename)
        status = self.db.purchases.get_status(request.args["order_id"][0])
        self.db.purchases.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "in progress", filename)):
        file_path = os.path.join(DATA_FOLDER, "purchases", "in progress", filename)
        status = self.db.purchases.get_status(request.args["order_id"][0])
        self.db.purchases.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "purchases", "trade receipts", filename)):
        file_path = os.path.join(DATA_FOLDER, "purchases", "trade receipts", filename)
        status = self.db.purchases.get_status(request.args["order_id"][0])
        self.db.purchases.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", filename)):
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "unfunded", filename)
        status = self.db.sales.get_status(request.args["order_id"][0])
        self.db.sales.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "in progress", filename)):
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "in progress", filename)
        status = self.db.sales.get_status(request.args["order_id"][0])
        self.db.sales.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", filename)):
        file_path = os.path.join(DATA_FOLDER, "store", "contracts", "trade receipts", filename)
        status = self.db.sales.get_status(request.args["order_id"][0])
        self.db.sales.status_changed(request.args["order_id"][0], 0)
    elif os.path.exists(os.path.join(DATA_FOLDER, "cases", filename)):
        file_path = os.path.join(DATA_FOLDER, "cases", filename)
        self.db.cases.status_changed(request.args["order_id"][0], 0)
        # Cases have no per-row status lookup; a fixed code is used. The
        # meaning of 4 is not visible here -- presumably "disputed"; confirm
        # against the status enum used elsewhere in the project.
        status = 4
    else:
        # Unknown order id: answer with an empty JSON object.
        request.write(json.dumps({}, indent=4))
        request.finish()
        return server.NOT_DONE_YET
    # NOTE(review): `filename` is rebound here from the basename string to
    # the open file object; harmless since the name is no longer needed.
    with open(file_path, 'r') as filename:
        order = json.load(filename, object_pairs_hook=OrderedDict)
    if status == 0 or status == 2:
        # Status 0/2 trigger a fresh payment check -- presumably the
        # "awaiting payment" states; verify against the status codes.
        check_order_for_payment(request.args["order_id"][0], self.db, self.protocol.blockchain,
                                self.mserver.protocol.get_notification_listener(),
                                self.protocol.testnet)
    def return_order():
        # Fallback/success path: send the order without tx history.
        request.setHeader('content-type', "application/json")
        request.write(json.dumps(sanitize_html(order), indent=4))
        request.finish()
    def height_fetched(ec, chain_height):
        payment_address = order["buyer_order"]["order"]["payment"]["address"]
        txs = []
        def history_fetched(ec, history):
            if ec:
                # History lookup failed: fall back to the bare order.
                return_order()
            elif timeout.active():
                # Cancel the watchdog now that the blockchain answered.
                timeout.cancel()
                for tx_type, txid, i, height, value in history:  # pylint: disable=W0612
                    tx = {
                        "txid": txid.encode("hex"),
                        "value": round(float(value) / 100000000, 8),
                        # height == 0 means the tx is still unconfirmed.
                        "confirmations": chain_height - height + 1 if height != 0 else 0
                    }
                    if tx_type == obelisk.PointIdent.Output:
                        tx["type"] = "incoming"
                    else:
                        tx["type"] = "outgoing"
                    txs.append(tx)
                order["bitcoin_txs"] = txs
                request.setHeader('content-type', "application/json")
                request.write(json.dumps(order, indent=4))
                request.finish()
        self.protocol.blockchain.fetch_history2(payment_address, history_fetched)
    if self.protocol.blockchain.connected:
        self.protocol.blockchain.fetch_last_height(height_fetched)
        # Watchdog: if the blockchain server does not answer within 4
        # seconds, return the order without transaction data.
        timeout = reactor.callLater(4, return_order)
    else:
        return_order()
    return server.NOT_DONE_YET
@POST('^/api/v1/dispute_contract')
@authenticated
def dispute_contract(self, request):
try:
self.mserver.open_dispute(request.args["order_id"][0],
request.args["claim"][0] if "claim" in request.args else None)
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/close_dispute')
@authenticated
def close_dispute(self, request):
try:
def cb(resp):
if resp:
request.write(json.dumps({"success": True}, indent=4))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": resp}, indent=4))
request.finish()
d = self.mserver.close_dispute(request.args["order_id"][0],
request.args["resolution"][0]
if "resolution" in request.args else None,
request.args["buyer_percentage"][0]
if "buyer_percentage" in request.args else None,
request.args["vendor_percentage"][0]
if "vendor_percentage" in request.args else None,
request.args["moderator_percentage"][0]
if "moderator_percentage" in request.args else None,
request.args["moderator_address"][0]
if "moderator_address" in request.args else None)
d.addCallback(cb)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/release_funds')
@authenticated
def release_funds(self, request):
try:
self.mserver.release_funds(request.args["order_id"][0])
request.write(json.dumps({"success": True}, indent=4))
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/get_cases')
@authenticated
def get_cases(self, request):
    """Return every moderation case from the database as a JSON list."""
    cases_list = []
    for row in self.db.cases.get_all():
        # Map the raw DB tuple onto named JSON fields.
        cases_list.append({
            "order_id": row[0],
            "title": row[1],
            "timestamp": row[2],
            "order_date": row[3],
            "btc_total": row[4],
            "thumbnail_hash": row[5],
            "buyer": row[6],
            "vendor": row[7],
            "validation": json.loads(row[8]),
            "status": "closed" if row[10] == 1 else "open",
            "unread": row[11],
            "status_changed": row[12] != 0
        })
    request.setHeader('content-type', "application/json")
    request.write(json.dumps(sanitize_html(cases_list), indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/order_messages')
@authenticated
def order_messages(self, request):
    """Return the chat messages attached to a single order."""
    message_list = []
    for record in self.db.messages.get_order_messages(request.args["order_id"][0]):
        # Rows without a guid are skipped.
        if record[0] is None:
            continue
        message_list.append({
            "guid": record[0],
            "handle": record[1],
            "message": record[5],
            "timestamp": record[6],
            "avatar_hash": record[7].encode("hex"),
            "message_type": record[4],
            "outgoing": record[9] != 0
        })
    request.setHeader('content-type', "application/json")
    request.write(json.dumps(sanitize_html(message_list), indent=4))
    request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/get_ratings')
@authenticated
def get_ratings(self, request):
    """Fetch ratings for a node or a single listing.

    When "guid" is supplied the target node is resolved on the DHT and its
    ratings fetched remotely (optionally filtered by "contract_id");
    otherwise ratings are read from the local database.
    """
    def parse_response(ratings):
        # Deliver the remote node's ratings, or {} when the fetch failed.
        if ratings is not None:
            request.setHeader('content-type', "application/json")
            request.write(json.dumps(sanitize_html(ratings), indent=4))
            request.finish()
        else:
            request.write(json.dumps({}))
            request.finish()
    if "guid" in request.args:
        def get_node(node):
            if node is not None:
                if "contract_id" in request.args and request.args["contract_id"][0] != "":
                    self.mserver.get_ratings(node, unhexlify(request.args["contract_id"][0]))\
                        .addCallback(parse_response)
                else:
                    self.mserver.get_ratings(node).addCallback(parse_response)
            else:
                # Could not resolve the guid on the DHT.
                request.write(json.dumps({}))
                request.finish()
        self.kserver.resolve(unhexlify(request.args["guid"][0])).addCallback(get_node)
    else:
        # Local lookup: each DB row stores the rating as a JSON string.
        ratings = []
        if "contract_id" in request.args and request.args["contract_id"][0] != "":
            for rating in self.db.ratings.get_listing_ratings(request.args["contract_id"][0]):
                ratings.append(json.loads(rating[0]))
        else:
            for rating in self.db.ratings.get_all_ratings():
                ratings.append(json.loads(rating[0]))
        request.setHeader('content-type', "application/json")
        request.write(json.dumps(sanitize_html(ratings), indent=4))
        request.finish()
    return server.NOT_DONE_YET
@GET('^/api/v1/btc_price')
@authenticated
def btc_price(self, request):
    """Return the BTC exchange-rate table, plus one specific rate when a
    currency code is supplied."""
    request.setHeader('content-type', "application/json")
    if "currency" in request.args:
        try:
            rate = BtcPrice.instance().get(request.args["currency"][0].upper(), False)
            request.write(json.dumps({"btcExchange": rate, "currencyCodes": BtcPrice.instance().prices}))
            request.finish()
            return server.NOT_DONE_YET
        except KeyError:
            # Unknown currency code: fall through to the plain table.
            pass
    request.write(json.dumps({"currencyCodes": BtcPrice.instance().prices}))
    request.finish()
    return server.NOT_DONE_YET
@POST('^/api/v1/refund')
@authenticated
def refund(self, request):
try:
def respond(success):
if success is True:
request.write(json.dumps({"success": True}))
request.finish()
else:
request.write(json.dumps({"success": False, "reason": success}))
request.finish()
self.mserver.refund(request.args["order_id"][0]).addCallback(respond)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@POST('^/api/v1/mark_discussion_as_read')
@authenticated
def mark_discussion_as_read(self, request):
try:
self.db.purchases.update_unread(request.args["id"][0], reset=True)
self.db.sales.update_unread(request.args["id"][0], reset=True)
self.db.cases.update_unread(request.args["id"][0], reset=True)
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}, indent=4))
request.finish()
return server.NOT_DONE_YET
@GET('^/api/v1/export')
def migrate(self, request):
try:
path = migratev2(self.db)
request.setHeader('content-disposition', 'filename="listings.csv"')
request.setHeader('content-type', "text/csv")
f = open(path)
while 1:
d = f.read(2048)
if not d:
break
request.write(d)
f.close()
request.finish()
return server.NOT_DONE_YET
except Exception, e:
request.write(json.dumps({"success": False, "reason": e.message}))
request.finish()
return server.NOT_DONE_YET
class RestAPI(Site):
    """Twisted Site serving the OpenBazaar REST API.

    Connections are only accepted from whitelisted IP addresses (defaults
    to localhost), unless the whitelist contains "0.0.0.0".
    """

    def __init__(self, mserver, kserver, openbazaar_protocol, username, password,
                 authenticated_sessions, only_ip=None, timeout=60 * 60 * 1):
        # Fix: identity comparison with None per PEP 8 (`is None`, not
        # `== None`); also keeps the default list out of the signature so
        # it is never shared between instances.
        if only_ip is None:
            only_ip = ["127.0.0.1"]
        self.only_ip = only_ip
        api_resource = OpenBazaarAPI(mserver, kserver, openbazaar_protocol,
                                     username, password, authenticated_sessions)
        Site.__init__(self, api_resource, timeout=timeout)

    def buildProtocol(self, addr):
        # Reject non-whitelisted hosts unless the API is deliberately
        # exposed on all interfaces.
        if addr.host not in self.only_ip and "0.0.0.0" not in self.only_ip:
            return
        return Site.buildProtocol(self, addr)
| {
"repo_name": "OpenBazaar/OpenBazaar-Server",
"path": "api/restapi.py",
"copies": "2",
"size": "70335",
"license": "mit",
"hash": -1198608660548764400,
"line_mean": 45.6721964167,
"line_max": 116,
"alpha_frac": 0.5250159949,
"autogenerated": false,
"ratio": 4.260145366444579,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.578516136134458,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import os
import time
from constants import DATA_FOLDER
from db.datastore import VendorStore, MessageStore
from random import shuffle
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from protos.countries import CountryCode
from protos.objects import Plaintext_Message, Value
from protos import objects
from twisted.internet import defer
from binascii import unhexlify
from dht.node import Node
class WSProtocol(WebSocketServerProtocol):
    """
    Handles new incoming requests coming from a websocket.
    """

    def onOpen(self):
        # Register with the factory so this connection receives broadcasts.
        self.factory.register(self)

    def get_vendors(self, message_id):
        """Stream vendor profiles to the client in batches.

        The shuffled vendor list is cached per ``message_id`` in
        ``factory.outstanding`` so repeat requests with the same id page
        through the remaining vendors instead of starting over.
        """
        if message_id in self.factory.outstanding:
            vendors = self.factory.outstanding[message_id]
        else:
            vendors = VendorStore().get_vendors()
            shuffle(vendors)
            self.factory.outstanding[message_id] = vendors

        def count_results(results):
            # Query one replacement vendor for every fetch that failed.
            to_query = 0
            for result in results:
                if not result:
                    to_query += 1
            for node in vendors[:to_query]:
                dl.append(self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node))
            # NOTE(review): `dl` still holds the already-fired deferreds
            # from earlier rounds, so gatherResults is re-run on a growing
            # list -- confirm this is intentional.
            defer.gatherResults(dl).addCallback(count_results)

        def handle_response(metadata, node):
            # Push the vendor's profile to the client; returns True so
            # count_results can tally failures. Unresponsive vendors are
            # purged from the local store.
            if metadata is not None:
                vendor = {
                    "id": message_id,
                    "vendor":
                        {
                            "guid": node.id.encode("hex"),
                            "name": metadata.name,
                            "handle": metadata.handle,
                            "avatar_hash": metadata.avatar_hash.encode("hex"),
                            "nsfw": metadata.nsfw
                        }
                }
                self.sendMessage(json.dumps(vendor, indent=4), False)
                vendors.remove(node)
                return True
            else:
                VendorStore().delete_vendor(node.id)
                vendors.remove(node)
                return False

        dl = []
        for node in vendors[:30]:
            dl.append(self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node))
        defer.gatherResults(dl).addCallback(count_results)

    def get_homepage_listings(self, message_id):
        """Stream roughly 30 listings gathered from random vendors.

        Each vendor contributes at most 3 listings per round; contract
        hashes already sent for this ``message_id`` are tracked in
        ``factory.outstanding`` to avoid duplicates.
        """
        if message_id not in self.factory.outstanding:
            self.factory.outstanding[message_id] = []
        vendors = VendorStore().get_vendors()
        shuffle(vendors)

        def count_results(results):
            # Keep querying fresh vendors until ~30 listings were sent or
            # we run out of vendors.
            to_query = 30
            for result in results:
                to_query -= result
            shuffle(vendors)
            if to_query/3 > 0 and len(vendors) > 0:
                for node in vendors[:to_query/3]:
                    dl.append(self.factory.mserver.get_listings(node).addCallback(handle_response, node))
                defer.gatherResults(dl).addCallback(count_results)

        def handle_response(listings, node):
            # Send up to 3 previously-unseen listings from this vendor and
            # return how many were sent.
            count = 0
            if listings is not None:
                for l in listings.listing:
                    if l.contract_hash not in self.factory.outstanding[message_id]:
                        listing_json = {
                            "id": message_id,
                            "listing":
                                {
                                    "guid": node.id.encode("hex"),
                                    "handle": listings.handle,
                                    "avatar_hash": listings.avatar_hash.encode("hex"),
                                    "title": l.title,
                                    "contract_hash": l.contract_hash.encode("hex"),
                                    "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                                    "category": l.category,
                                    "price": l.price,
                                    "currency_code": l.currency_code,
                                    "nsfw": l.nsfw,
                                    "origin": str(CountryCode.Name(l.origin)),
                                    "ships_to": []
                                }
                        }
                        for country in l.ships_to:
                            listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                        # Fetch any images not yet in the local cache.
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + l.thumbnail_hash.encode("hex")):
                            self.factory.mserver.get_image(node, l.thumbnail_hash)
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + listings.avatar_hash.encode("hex")):
                            self.factory.mserver.get_image(node, listings.avatar_hash)
                        self.sendMessage(json.dumps(listing_json, indent=4), False)
                        count += 1
                        self.factory.outstanding[message_id].append(l.contract_hash)
                        if count == 3:
                            return count
                vendors.remove(node)
            else:
                VendorStore().delete_vendor(node.id)
                vendors.remove(node)
            return count

        dl = []
        for vendor in vendors[:10]:
            dl.append(self.factory.mserver.get_listings(vendor).addCallback(handle_response, vendor))
        defer.gatherResults(dl).addCallback(count_results)

    def send_message(self, guid, handle, message, subject, message_type, recipient_encryption_key):
        """Persist an outgoing chat message, then deliver it to the peer.

        If the guid cannot be resolved on the DHT, a placeholder node with
        a dummy address is used so the message is still handed to mserver.
        """
        MessageStore().save_message(guid, handle, "", recipient_encryption_key, subject,
                                    message_type, message, "", time.time(), "", True)

        def send(node_to_send):
            n = node_to_send if node_to_send is not None else Node(unhexlify(guid), "123.4.5.6", 1234)
            self.factory.mserver.send_message(n, recipient_encryption_key,
                                              Plaintext_Message.Type.Value(message_type.upper()),
                                              message, subject)
        self.factory.kserver.resolve(unhexlify(guid)).addCallback(send)

    def search(self, message_id, keyword):
        """Run a DHT keyword search and stream matching listings back."""
        def respond(l, node):
            # Serialize one listing's metadata for the client.
            if l is not None:
                listing_json = {
                    "id": message_id,
                    "listing":
                        {
                            "guid": node.id.encode("hex"),
                            "title": l.title,
                            "contract_hash": l.contract_hash.encode("hex"),
                            "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                            "category": l.category,
                            "price": l.price,
                            "currency_code": l.currency_code,
                            "nsfw": l.nsfw,
                            "origin": str(CountryCode.Name(l.origin)),
                            "ships_to": []
                        }
                }
                for country in l.ships_to:
                    listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                self.sendMessage(json.dumps(listing_json, indent=4), False)

        def parse_results(values):
            # Each DHT value wraps a serialized node plus a contract key.
            if values is not None:
                for v in values:
                    try:
                        val = Value()
                        val.ParseFromString(v)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.ip, n.port, n.signedPublicKey, True)
                        self.factory.mserver.get_contract_metadata(node_to_ask,
                                                                   val.valueKey).addCallback(respond, node_to_ask)
                    except Exception:
                        # Skip malformed DHT values.
                        pass
        self.factory.kserver.get(keyword.lower()).addCallback(parse_results)

    def onMessage(self, payload, isBinary):
        """Dispatch an incoming websocket request to its handler."""
        try:
            request_json = json.loads(payload)
            message_id = request_json["request"]["id"]
            if request_json["request"]["command"] == "get_vendors":
                self.get_vendors(message_id)
            elif request_json["request"]["command"] == "get_homepage_listings":
                self.get_homepage_listings(message_id)
            elif request_json["request"]["command"] == "search":
                self.search(message_id, request_json["request"]["keyword"].lower())
            elif request_json["request"]["command"] == "send_message":
                self.send_message(request_json["request"]["guid"],
                                  request_json["request"]["handle"],
                                  request_json["request"]["message"],
                                  request_json["request"]["subject"],
                                  request_json["request"]["message_type"],
                                  request_json["request"]["recipient_key"])
        except Exception:
            # Malformed requests are silently ignored.
            pass

    def connectionLost(self, reason):
        # Deregister so the factory stops broadcasting to a dead socket.
        WebSocketServerProtocol.connectionLost(self, reason)
        self.factory.unregister(self)
class WSFactory(WebSocketServerFactory):
    """
    Simple broadcast server broadcasting any message it receives to all
    currently connected clients.
    """

    def __init__(self, url, mserver, kserver, debug=False, debugCodePaths=False):
        WebSocketServerFactory.__init__(self, url, debug=debug, debugCodePaths=debugCodePaths)
        self.mserver = mserver
        self.kserver = kserver
        # Per-request state shared between the protocol instances.
        self.outstanding = {}
        self.clients = []

    def register(self, client):
        # Track each websocket connection at most once.
        if client in self.clients:
            return
        self.clients.append(client)

    def unregister(self, client):
        # EAFP: removing an untracked client is a no-op.
        try:
            self.clients.remove(client)
        except ValueError:
            pass

    def push(self, msg):
        # Fan the message out to every connected client.
        for connection in self.clients:
            connection.sendMessage(msg)
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "ws.py",
"copies": "1",
"size": "10032",
"license": "mit",
"hash": -8211336571965899000,
"line_mean": 42.2413793103,
"line_max": 114,
"alpha_frac": 0.5028907496,
"autogenerated": false,
"ratio": 4.659544821179749,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.566243557077975,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import json
import os
import time
from constants import DATA_FOLDER
from market.profile import Profile
from keyutils.keys import KeyChain
from random import shuffle
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol
from protos.countries import CountryCode
from protos.objects import Plaintext_Message, Value, Listings
from protos import objects
from twisted.internet import defer
from binascii import unhexlify
from dht.node import Node
class WSProtocol(WebSocketServerProtocol):
    """
    Handles new incoming requests coming from a websocket.
    """

    def onOpen(self):
        # Register with the factory so this connection receives broadcasts.
        self.factory.register(self)

    def get_vendors(self, message_id):
        """Stream vendor profiles to the client in batches.

        The shuffled vendor list is cached per ``message_id`` in
        ``factory.outstanding`` so repeat requests with the same id page
        through the remaining vendors instead of starting over.
        """
        if message_id in self.factory.outstanding:
            vendors = self.factory.outstanding[message_id]
        else:
            vendors = self.factory.db.VendorStore().get_vendors()
            shuffle(vendors)
            self.factory.outstanding[message_id] = vendors

        def count_results(results):
            # Query one replacement vendor for every fetch that failed.
            to_query = 0
            for result in results:
                if not result:
                    to_query += 1
            for node in vendors[:to_query]:
                dl.append(self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node))
            # NOTE(review): `dl` still holds the already-fired deferreds
            # from earlier rounds -- confirm the re-gathering is intended.
            defer.gatherResults(dl).addCallback(count_results)

        def handle_response(metadata, node):
            # Push the vendor's profile to the client; returns True so
            # count_results can tally failures. Unresponsive vendors are
            # purged from the local store.
            if metadata is not None:
                vendor = {
                    "id": message_id,
                    "vendor":
                        {
                            "guid": node.id.encode("hex"),
                            "name": metadata.name,
                            "short_description": metadata.short_description,
                            "handle": metadata.handle,
                            "avatar_hash": metadata.avatar_hash.encode("hex"),
                            "nsfw": metadata.nsfw
                        }
                }
                self.sendMessage(json.dumps(vendor, indent=4), False)
                vendors.remove(node)
                return True
            else:
                self.factory.db.VendorStore().delete_vendor(node.id)
                vendors.remove(node)
                return False

        dl = []
        for node in vendors[:30]:
            dl.append(self.factory.mserver.get_user_metadata(node).addCallback(handle_response, node))
        defer.gatherResults(dl).addCallback(count_results)

    def get_moderators(self, message_id):
        """Refresh the moderator table from the DHT and stream each
        moderator's profile to the client."""
        m = self.factory.db.ModeratorStore()

        def parse_response(moderators):
            if moderators is not None:
                # Rebuild the table from scratch with the fresh DHT data.
                m.clear_all()

                def parse_profile(profile, node):
                    if profile is not None:
                        m.save_moderator(node.id, node.signed_pubkey, profile.encryption_key.public_key,
                                         profile.encryption_key.signature, profile.bitcoin_key.public_key,
                                         profile.bitcoin_key.signature, profile.handle)
                        moderator = {
                            "id": message_id,
                            "moderator":
                                {
                                    "guid": node.id.encode("hex"),
                                    "name": profile.name,
                                    "handle": profile.handle,
                                    "short_description": profile.short_description,
                                    "avatar_hash": profile.avatar_hash.encode("hex"),
                                    "about": profile.about
                                }
                        }
                        self.sendMessage(json.dumps(moderator, indent=4), False)
                    else:
                        m.delete_moderator(node.id)
                for mod in moderators:
                    try:
                        val = objects.Value()
                        val.ParseFromString(mod)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.ip, n.port, n.signedPublicKey)
                        # Answer for our own node locally instead of
                        # querying ourselves over the network.
                        if n.guid == KeyChain(self.factory.db).guid:
                            parse_profile(Profile(self.factory.db).get(), node_to_ask)
                        else:
                            self.factory.mserver.get_profile(node_to_ask)\
                                .addCallback(parse_profile, node_to_ask)
                    except Exception:
                        # Skip malformed DHT values.
                        pass
        self.factory.kserver.get("moderators").addCallback(parse_response)

    def get_homepage_listings(self, message_id):
        """Stream roughly 30 listings gathered from random vendors.

        Each vendor contributes at most 3 listings per round; contract
        hashes already sent for this ``message_id`` are tracked in
        ``factory.outstanding`` to avoid duplicates.
        """
        if message_id not in self.factory.outstanding:
            self.factory.outstanding[message_id] = []
        vendors = self.factory.db.VendorStore().get_vendors()
        shuffle(vendors)

        def count_results(results):
            # Keep querying fresh vendors until ~30 listings were sent or
            # we run out of vendors.
            to_query = 30
            for result in results:
                to_query -= result
            shuffle(vendors)
            if to_query/3 > 0 and len(vendors) > 0:
                for node in vendors[:to_query/3]:
                    dl.append(self.factory.mserver.get_listings(node).addCallback(handle_response, node))
                defer.gatherResults(dl).addCallback(count_results)

        def handle_response(listings, node):
            # Send up to 3 previously-unseen listings from this vendor and
            # return how many were sent.
            count = 0
            if listings is not None:
                for l in listings.listing:
                    if l.contract_hash not in self.factory.outstanding[message_id]:
                        listing_json = {
                            "id": message_id,
                            "listing":
                                {
                                    "guid": node.id.encode("hex"),
                                    "handle": listings.handle,
                                    "avatar_hash": listings.avatar_hash.encode("hex"),
                                    "title": l.title,
                                    "contract_hash": l.contract_hash.encode("hex"),
                                    "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                                    "category": l.category,
                                    "price": l.price,
                                    "currency_code": l.currency_code,
                                    "nsfw": l.nsfw,
                                    "origin": str(CountryCode.Name(l.origin)),
                                    "ships_to": []
                                }
                        }
                        for country in l.ships_to:
                            listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                        # Fetch any images not yet in the local cache.
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + l.thumbnail_hash.encode("hex")):
                            self.factory.mserver.get_image(node, l.thumbnail_hash)
                        if not os.path.isfile(DATA_FOLDER + 'cache/' + listings.avatar_hash.encode("hex")):
                            self.factory.mserver.get_image(node, listings.avatar_hash)
                        self.sendMessage(json.dumps(listing_json, indent=4), False)
                        count += 1
                        self.factory.outstanding[message_id].append(l.contract_hash)
                        if count == 3:
                            return count
                vendors.remove(node)
            else:
                self.factory.db.VendorStore().delete_vendor(node.id)
                vendors.remove(node)
            return count

        dl = []
        for vendor in vendors[:10]:
            dl.append(self.factory.mserver.get_listings(vendor).addCallback(handle_response, vendor))
        defer.gatherResults(dl).addCallback(count_results)

    def send_message(self, guid, handle, message, subject, message_type, recipient_encryption_key):
        """Persist an outgoing chat message, then deliver it to the peer.

        If the guid cannot be resolved on the DHT, a placeholder node with
        a dummy address is used so the message is still handed to mserver.
        """
        self.factory.db.MessageStore().save_message(guid, handle, "", recipient_encryption_key, subject,
                                                    message_type, message, "", time.time(), "", True)

        def send(node_to_send):
            n = node_to_send if node_to_send is not None else Node(unhexlify(guid), "123.4.5.6", 1234)
            self.factory.mserver.send_message(n, recipient_encryption_key,
                                              Plaintext_Message.Type.Value(message_type.upper()),
                                              message, subject)
        self.factory.kserver.resolve(unhexlify(guid)).addCallback(send)

    def search(self, message_id, keyword):
        """Run a DHT keyword search and stream matching listings back."""
        def respond(l, node):
            # Serialize one listing's metadata for the client.
            if l is not None:
                listing_json = {
                    "id": message_id,
                    "listing":
                        {
                            "guid": node.id.encode("hex"),
                            "title": l.title,
                            "contract_hash": l.contract_hash.encode("hex"),
                            "thumbnail_hash": l.thumbnail_hash.encode("hex"),
                            "category": l.category,
                            "price": l.price,
                            "currency_code": l.currency_code,
                            "nsfw": l.nsfw,
                            "origin": str(CountryCode.Name(l.origin)),
                            "ships_to": []
                        }
                }
                for country in l.ships_to:
                    listing_json["listing"]["ships_to"].append(str(CountryCode.Name(country)))
                self.sendMessage(json.dumps(listing_json, indent=4), False)

        def parse_results(values):
            # Each DHT value wraps a serialized node plus a contract key.
            if values is not None:
                for v in values:
                    try:
                        val = Value()
                        val.ParseFromString(v)
                        n = objects.Node()
                        n.ParseFromString(val.serializedData)
                        node_to_ask = Node(n.guid, n.ip, n.port, n.signedPublicKey, True)
                        if n.guid == KeyChain(self.factory.db).guid:
                            # The hit is our own node: answer from the local
                            # listings store instead of the network.
                            proto = self.factory.db.ListingsStore().get_proto()
                            l = Listings()
                            l.ParseFromString(proto)
                            for listing in l.listing:
                                if listing.contract_hash == val.valueKey:
                                    respond(listing, node_to_ask)
                        else:
                            self.factory.mserver.get_contract_metadata(node_to_ask, val.valueKey)\
                                .addCallback(respond, node_to_ask)
                    except Exception:
                        # Skip malformed DHT values.
                        pass
        self.factory.kserver.get(keyword.lower()).addCallback(parse_results)

    def onMessage(self, payload, isBinary):
        """Dispatch an incoming websocket request to its handler."""
        try:
            request_json = json.loads(payload)
            message_id = request_json["request"]["id"]
            if request_json["request"]["command"] == "get_vendors":
                self.get_vendors(message_id)
            # NOTE(review): this second test is `if` rather than `elif`,
            # breaking the chain; harmless since commands are mutually
            # exclusive, but inconsistent with the other branches.
            if request_json["request"]["command"] == "get_moderators":
                self.get_moderators(message_id)
            elif request_json["request"]["command"] == "get_homepage_listings":
                self.get_homepage_listings(message_id)
            elif request_json["request"]["command"] == "search":
                self.search(message_id, request_json["request"]["keyword"].lower())
            elif request_json["request"]["command"] == "send_message":
                self.send_message(request_json["request"]["guid"],
                                  request_json["request"]["handle"],
                                  request_json["request"]["message"],
                                  request_json["request"]["subject"],
                                  request_json["request"]["message_type"],
                                  request_json["request"]["recipient_key"])
        except Exception:
            # Malformed requests are silently ignored.
            pass

    def connectionLost(self, reason):
        # Deregister so the factory stops broadcasting to a dead socket.
        WebSocketServerProtocol.connectionLost(self, reason)
        self.factory.unregister(self)
class WSFactory(WebSocketServerFactory):
    """
    Simple broadcast server broadcasting any message it receives to all
    currently connected clients.
    """

    def __init__(self, url, mserver, kserver, debug=False, debugCodePaths=False):
        WebSocketServerFactory.__init__(self, url, debug=debug, debugCodePaths=debugCodePaths)
        self.mserver = mserver
        self.kserver = kserver
        self.db = mserver.db
        # Per-request state shared between the protocol instances.
        self.outstanding = {}
        self.clients = []

    def register(self, client):
        # Track each websocket connection at most once.
        if client in self.clients:
            return
        self.clients.append(client)

    def unregister(self, client):
        # EAFP: removing an untracked client is a no-op.
        try:
            self.clients.remove(client)
        except ValueError:
            pass

    def push(self, msg):
        # Fan the message out to every connected client.
        for connection in self.clients:
            connection.sendMessage(msg)
| {
"repo_name": "JimmyMow/OpenBazaar-Server",
"path": "api/ws.py",
"copies": "3",
"size": "13034",
"license": "mit",
"hash": -7454445136068461000,
"line_mean": 44.1003460208,
"line_max": 107,
"alpha_frac": 0.4881847476,
"autogenerated": false,
"ratio": 4.76563071297989,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0016634263431675436,
"num_lines": 289
} |
__author__ = 'chris'
import json
import random
import time
import nacl.signing
import bitcoin
from hashlib import sha256
from binascii import unhexlify, hexlify
from collections import OrderedDict
from urllib2 import Request, urlopen, URLError
import re
import os
import nacl.encoding
from twisted.internet import reactor
from protos.objects import Listings
from protos.countries import CountryCode
from dht.utils import digest
from constants import DATA_FOLDER
from market.profile import Profile
from keyutils.keys import KeyChain
from keyutils.bip32utils import derive_childkey
from log import Logger
class Contract(object):
"""
A class for creating and interacting with OpenBazaar Ricardian contracts.
"""
def __init__(self, database, contract=None, hash_value=None, testnet=False):
    """
    This class can be instantiated with either an `OrderedDict` or a hash
    of a contract. If a hash is used, we will load the contract from either
    the file system or cache.
    Alternatively, pass in no parameters if the intent is to create a new
    contract.
    Args:
        contract: an `OrderedDict` containing a filled out json contract
        hash: a hash160 (in raw bytes) of a contract
        testnet: is this contract on the testnet
    """
    self.db = database
    self.keychain = KeyChain(self.db)
    if contract is not None:
        self.contract = contract
    elif hash_value is not None:
        # Resolve the hash to a file via the HashMap table, falling back
        # to the cache directory, then to an in-progress purchase file.
        try:
            file_path = self.db.HashMap().get_file(hash_value)
            if file_path is None:
                file_path = DATA_FOLDER + "cache/" + hexlify(hash_value)
            with open(file_path, 'r') as filename:
                self.contract = json.load(filename, object_pairs_hook=OrderedDict)
        except Exception:
            try:
                file_path = DATA_FOLDER + "purchases/in progress/" + hexlify(hash_value) + ".json"
                with open(file_path, 'r') as filename:
                    self.contract = json.load(filename, object_pairs_hook=OrderedDict)
            except Exception:
                # Nothing found anywhere: start with an empty contract.
                self.contract = {}
    else:
        self.contract = {}
    self.log = Logger(system=self)
    # used when purchasing this contract
    self.testnet = testnet
    self.ws = None
    self.blockchain = None
    self.amount_funded = 0
    self.received_txs = []
    self.timeout = None
    self.is_purchase = False
def create(self,
           expiration_date,
           metadata_category,
           title,
           description,
           currency_code,
           price,
           process_time,
           nsfw,
           shipping_origin=None,
           shipping_regions=None,
           est_delivery_domestic=None,
           est_delivery_international=None,
           terms_conditions=None,
           returns=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           images=None,
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None,
           options=None,
           moderators=None):
    """
    Build, sign, and save a new vendor-offer contract.

    All parameters are strings except:
    :param expiration_date: `string` (must be formatted UTC datetime)
    :param keywords: `list`
    :param nsfw: `boolean`
    :param images: a `list` of image files
    :param free_shipping: `boolean`
    :param shipping_origin: a 'string' formatted `CountryCode`
    :param shipping_regions: a 'list' of 'string' formatted `CountryCode`s
    :param options: a 'dict' containing options as keys and 'list' as option values.
    :param moderators: a 'list' of 'string' guids (hex encoded).
    """
    profile = Profile(self.db).get()
    # Mandatory skeleton: metadata, vendor identity/keys, and the item.
    self.contract = OrderedDict(
        {
            "vendor_offer": {
                "listing": {
                    "metadata": {
                        "version": "0.1",
                        "category": metadata_category.lower(),
                        "category_sub": "fixed price"
                    },
                    "id": {
                        "guid": self.keychain.guid.encode("hex"),
                        "pubkeys": {
                            "guid": self.keychain.guid_signed_pubkey[64:].encode("hex"),
                            "bitcoin": bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey),
                            "encryption": self.keychain.encryption_pubkey.encode("hex")
                        }
                    },
                    "item": {
                        "title": title,
                        "description": description,
                        "process_time": process_time,
                        "price_per_unit": {},
                        "nsfw": nsfw
                    }
                }
            }
        }
    )
    if expiration_date.lower() == "never":
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = "never"
    else:
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = expiration_date + " UTC"
    if metadata_category == "physical good" and condition is not None:
        self.contract["vendor_offer"]["listing"]["item"]["condition"] = condition
    # Price is stored either directly in BTC or as a fiat amount + code.
    if currency_code.upper() == "BTC":
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["bitcoin"] = price
    else:
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["fiat"] = {}
        item["price_per_unit"]["fiat"]["price"] = price
        item["price_per_unit"]["fiat"]["currency_code"] = currency_code
    # Optional item fields.
    if keywords is not None:
        self.contract["vendor_offer"]["listing"]["item"]["keywords"] = []
        self.contract["vendor_offer"]["listing"]["item"]["keywords"].extend(keywords)
    if category is not None:
        self.contract["vendor_offer"]["listing"]["item"]["category"] = category
    if sku is not None:
        self.contract["vendor_offer"]["listing"]["item"]["sku"] = sku
    if options is not None:
        self.contract["vendor_offer"]["listing"]["item"]["options"] = options
    # Shipping section only applies to physical goods.
    if metadata_category == "physical good":
        self.contract["vendor_offer"]["listing"]["shipping"] = {}
        shipping = self.contract["vendor_offer"]["listing"]["shipping"]
        shipping["shipping_origin"] = shipping_origin
        if free_shipping is False:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = False
            self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"] = {}
            if shipping_currency_code == "BTC":
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"] = {}
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "domestic"] = shipping_domestic
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "international"] = shipping_international
            else:
                shipping = self.contract["vendor_offer"]["listing"]["shipping"]
                shipping["flat_fee"]["fiat"] = {}
                shipping["flat_fee"]["fiat"]["price"] = {}
                shipping["flat_fee"]["fiat"]["price"][
                    "domestic"] = shipping_domestic
                shipping["flat_fee"]["fiat"]["price"][
                    "international"] = shipping_international
                shipping["flat_fee"]["fiat"][
                    "currency_code"] = shipping_currency_code
        else:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = True
        self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"] = []
        for region in shipping_regions:
            shipping = self.contract["vendor_offer"]["listing"]["shipping"]
            shipping["shipping_regions"].append(region)
        listing = self.contract["vendor_offer"]["listing"]
        listing["shipping"]["est_delivery"] = {}
        listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
        listing["shipping"]["est_delivery"][
            "international"] = est_delivery_international
    if profile.HasField("handle"):
        self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] = profile.handle
    if images is not None:
        self.contract["vendor_offer"]["listing"]["item"]["image_hashes"] = []
        for image_hash in images:
            self.contract["vendor_offer"]["listing"]["item"]["image_hashes"].append(image_hash)
    if terms_conditions is not None or returns is not None:
        self.contract["vendor_offer"]["listing"]["policy"] = {}
        if terms_conditions is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["terms_conditions"] = terms_conditions
        if returns is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["returns"] = returns
    if moderators is not None:
        self.contract["vendor_offer"]["listing"]["moderators"] = []
        for mod in moderators:
            # Fix: removed a stray debug `print mod_info` left in the
            # production path.
            mod_info = self.db.ModeratorStore().get_moderator(unhexlify(mod))
            if mod_info is not None:
                moderator = {
                    "guid": mod,
                    "blockchain_id": mod_info[6],
                    "pubkeys": {
                        "signing": {
                            "key": mod_info[1][64:].encode("hex"),
                            "signature": mod_info[1][:64].encode("hex")
                        },
                        "encryption": {
                            "key": mod_info[2].encode("hex"),
                            "signature": mod_info[3].encode("hex")
                        },
                        "bitcoin": {
                            "key": mod_info[4].encode("hex"),
                            "signature": mod_info[5].encode("hex")
                        }
                    }
                }
                self.contract["vendor_offer"]["listing"]["moderators"].append(moderator)
    # Sign the serialized listing with the node's guid key and persist.
    listing = json.dumps(self.contract["vendor_offer"]["listing"], indent=4)
    self.contract["vendor_offer"]["signature"] = \
        self.keychain.signing_key.sign(listing, encoder=nacl.encoding.HexEncoder)[:128]
    self.save()
def add_purchase_info(self,
                      quantity,
                      ship_to=None,
                      shipping_address=None,
                      city=None,
                      state=None,
                      postal_code=None,
                      country=None,
                      moderator=None,
                      options=None):
    """
    Update the contract with the buyer's purchase information.

    Args:
        quantity: number of units being purchased. The payment amount is the
            per-unit price multiplied by this value.
        ship_to / shipping_address / city / state / postal_code / country:
            shipping fields, required for "physical good" listings.
        moderator: hex guid of the moderator chosen from the listing's
            moderator list. Must match one of the listing's moderators.
        options: optional dict of selected listing options.

    Returns:
        The multisig payment address on success, or False if the moderator is
        invalid or the fiat exchange-rate lookup fails.
    """
    profile = Profile(self.db).get()
    order_json = {
        "buyer_order": {
            "order": {
                # hash of the contract as it stood before the order was added
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "quantity": quantity,
                "id": {
                    "guid": self.keychain.guid.encode("hex"),
                    "pubkeys": {
                        "guid": self.keychain.guid_signed_pubkey[64:].encode("hex"),
                        "bitcoin": bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey),
                        "encryption": self.keychain.encryption_pubkey.encode("hex")
                    }
                },
                "payment": {}
            }
        }
    }
    if profile.HasField("handle"):
        order_json["buyer_order"]["order"]["id"]["blockchain_id"] = profile.handle
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        order_json["buyer_order"]["order"]["shipping"] = {}
        order_json["buyer_order"]["order"]["shipping"]["ship_to"] = ship_to
        order_json["buyer_order"]["order"]["shipping"]["address"] = shipping_address
        order_json["buyer_order"]["order"]["shipping"]["city"] = city
        order_json["buyer_order"]["order"]["shipping"]["state"] = state
        order_json["buyer_order"]["order"]["shipping"]["postal_code"] = postal_code
        order_json["buyer_order"]["order"]["shipping"]["country"] = country
    if options is not None:
        order_json["buyer_order"]["order"]["options"] = options
    if moderator:  # TODO: Handle direct payments
        # fresh chaincode so each order derives unique child keys
        chaincode = sha256(str(random.getrandbits(256))).digest().encode("hex")
        order_json["buyer_order"]["order"]["payment"]["chaincode"] = chaincode
        valid_mod = False
        for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
            if mod["guid"] == moderator:
                order_json["buyer_order"]["order"]["moderator"] = moderator
                masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
                valid_mod = True
        if not valid_mod:
            return False
        # derive the three 2-of-3 multisig keys from the shared chaincode
        masterkey_b = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = derive_childkey(masterkey_b, chaincode)
        vendor_key = derive_childkey(masterkey_v, chaincode)
        moderator_key = derive_childkey(masterkey_m, chaincode)
        redeem_script = '75' + bitcoin.mk_multisig_script([buyer_key, vendor_key, moderator_key], 2)
        order_json["buyer_order"]["order"]["payment"]["redeem_script"] = redeem_script
        if self.testnet:
            # 196 is the testnet p2sh version byte
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script, 196)
        else:
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script)
        order_json["buyer_order"]["order"]["payment"]["address"] = payment_address
    price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
    if "bitcoin" in price_json:
        # BUGFIX: the amount previously ignored `quantity`, charging the
        # single-unit price regardless of how many units were ordered.
        order_json["buyer_order"]["order"]["payment"]["amount"] = float(price_json["bitcoin"]) * quantity
    else:
        currency_code = price_json["fiat"]["currency_code"]
        fiat_price = price_json["fiat"]["price"]
        try:
            request = Request('https://api.bitcoinaverage.com/ticker/' + currency_code.upper() + '/last')
            response = urlopen(request)
            conversion_rate = response.read()
        except URLError:
            return False
        # convert fiat to BTC at the current rate, then scale by quantity
        order_json["buyer_order"]["order"]["payment"]["amount"] = float(
            "{0:.8f}".format(float(fiat_price) / float(conversion_rate))) * quantity
    self.contract["buyer_order"] = order_json["buyer_order"]
    order = json.dumps(self.contract["buyer_order"]["order"], indent=4)
    self.contract["buyer_order"]["signature"] = \
        self.keychain.signing_key.sign(order, encoder=nacl.encoding.HexEncoder)[:128]
    return self.contract["buyer_order"]["order"]["payment"]["address"]
def add_order_confirmation(self,
                           payout_address,
                           comments=None,
                           shipper=None,
                           tracking_number=None,
                           est_delivery=None,
                           url=None,
                           password=None):
    """
    Add the vendor's order confirmation to the contract.

    Args:
        payout_address: mainnet/testnet bitcoin address the vendor wants paid to.
        comments: optional free-text comments for the buyer.
        shipper / tracking_number / est_delivery: used for "physical good" listings.
        url / password: used for "digital good" listings.

    Raises:
        Exception: if the payout address does not match the active network or
            fails base58check validation.
    """
    # cheap network-prefix sanity check before the full base58check validation
    if not self.testnet and not (payout_address[:1] == "1" or payout_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.testnet and not \
            (payout_address[:1] == "n" or payout_address[:1] == "m" or payout_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        bitcoin.b58check_to_hex(payout_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    conf_json = {
        "vendor_order_confirmation": {
            "invoice": {
                # ties this confirmation to the contract as it stood before
                # the confirmation was attached
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "payout_address": payout_address
            }
        }
    }
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        shipping = {"shipper": shipper, "tracking_number": tracking_number, "est_delivery": est_delivery}
        conf_json["vendor_order_confirmation"]["invoice"]["shipping"] = shipping
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "digital good":
        content_source = {"url": url, "password": password}
        conf_json["vendor_order_confirmation"]["invoice"]["content_source"] = content_source
    if comments:
        conf_json["vendor_order_confirmation"]["invoice"]["comments"] = comments
    # sign only the invoice portion of the confirmation
    confirmation = json.dumps(conf_json["vendor_order_confirmation"]["invoice"], indent=4)
    conf_json["vendor_order_confirmation"]["signature"] = \
        self.keychain.signing_key.sign(confirmation, encoder=nacl.encoding.HexEncoder)[:128]
    # NOTE: order_id must be computed BEFORE the confirmation is merged into
    # the contract, so it matches the hash the buyer computed at order time
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    self.contract["vendor_order_confirmation"] = conf_json["vendor_order_confirmation"]
    # status 2 == confirmed (on_tx_received sets 1 when funded)
    self.db.Sales().update_status(order_id, 2)
    file_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
def accept_order_confirmation(self, ws, confirmation_json=None):
    """
    Validate the order confirmation sent over from the seller and update our node accordingly.

    Args:
        ws: websocket server used to push an "order_confirmation" notification to the UI.
        confirmation_json: optional json string of the confirmation; when given it is
            merged into the contract before validation.

    Returns:
        The order id (hex contract hash) on success, False on any validation or IO failure.
    """
    self.ws = ws
    try:
        if confirmation_json:
            self.contract["vendor_order_confirmation"] = json.loads(confirmation_json,
                                                                    object_pairs_hook=OrderedDict)
        # the ref_hash must equal the digest of the contract WITHOUT the
        # confirmation attached, i.e. the contract as we sent it
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["vendor_order_confirmation"]
        contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
        ref_hash = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
        if ref_hash != contract_hash:
            raise Exception("Order number doesn't match")
        if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
            shipping = self.contract["vendor_order_confirmation"]["invoice"]["shipping"]
            if "tracking_number" not in shipping or "shipper" not in shipping:
                raise Exception("No shipping information")
        # update the order status in the db (2 == confirmed)
        self.db.Purchases().update_status(contract_hash, 2)
        file_path = DATA_FOLDER + "purchases/in progress/" + contract_hash + ".json"
        # update the contract in the file system
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
        message_json = {
            "order_confirmation": {
                "order_id": contract_hash,
                "title": self.contract["vendor_offer"]["listing"]["item"]["title"]
            }
        }
        # push the message over websockets
        self.ws.push(json.dumps(message_json, indent=4))
        return contract_hash
    except Exception as e:
        # keep the best-effort False contract for callers, but don't swallow
        # the failure silently -- record why the confirmation was rejected
        self.log.error("order confirmation validation failed: %s" % e)
        return False
def await_funding(self, websocket_server, libbitcoin_client, proofSig, is_purchase=True):
    """
    Saves the contract to the file system and db as an unfunded contract.
    Listens on the libbitcoin server for the multisig address to be funded.
    Deletes the unfunded contract from the file system and db if it goes
    unfunded for more than 10 minutes.

    Args:
        websocket_server: notifications are pushed here when payment arrives.
        libbitcoin_client: blockchain client used to watch the payment address.
        proofSig: stored with the purchase record (buyer side only);
            presumably a signature proving the order -- TODO confirm semantics.
        is_purchase: True when we are the buyer, False when we are the vendor.
    """
    # TODO: Handle direct payments
    self.ws = websocket_server
    self.blockchain = libbitcoin_client
    self.is_purchase = is_purchase
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    payment_address = self.contract["buyer_order"]["order"]["payment"]["address"]
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    if "image_hashes" in vendor_item:
        thumbnail_hash = vendor_item["image_hashes"][0]
    else:
        thumbnail_hash = ""
    # prefer the vendor's human-readable handle when one is registered
    if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"]:
        vendor = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
    else:
        vendor = self.contract["vendor_offer"]["listing"]["id"]["guid"]
    if is_purchase:
        file_path = DATA_FOLDER + "purchases/in progress/" + order_id + ".json"
        # status starts at 0 (unfunded); bumped to 1 by on_tx_received
        self.db.Purchases().new_purchase(order_id,
                                         self.contract["vendor_offer"]["listing"]["item"]["title"],
                                         time.time(),
                                         self.contract["buyer_order"]["order"]["payment"]["amount"],
                                         payment_address,
                                         0,
                                         thumbnail_hash,
                                         vendor,
                                         proofSig)
    else:
        file_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
        self.db.Sales().new_sale(order_id,
                                 self.contract["vendor_offer"]["listing"]["item"]["title"],
                                 time.time(),
                                 self.contract["buyer_order"]["order"]["payment"]["amount"],
                                 payment_address,
                                 0,
                                 thumbnail_hash,
                                 vendor)
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # give the buyer 10 minutes to fund before cleaning up
    self.timeout = reactor.callLater(600, self._delete_unfunded)
    self.blockchain.subscribe_address(payment_address, notification_cb=self.on_tx_received)
def _delete_unfunded(self):
    """
    Timeout handler: the contract went unfunded for the full 10 minute
    window, so purge it from both the database and the file system.
    """
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    if self.is_purchase:
        self.db.Purchases().delete_purchase(order_id)
        subdir = "purchases/in progress/"
    else:
        self.db.Sales().delete_sale(order_id)
        subdir = "store/listings/in progress/"
    stale_path = DATA_FOLDER + subdir + order_id + ".json"
    if os.path.exists(stale_path):
        os.remove(stale_path)
def on_tx_received(self, address_version, address_hash, height, block_hash, tx):
    """
    Fire when the libbitcoin server tells us we received a payment to this funding address.

    While unlikely, a user may send multiple transactions to the funding address reach the
    funding level. We need to keep a running balance and increment it when a new transaction
    is received. If the contract is fully funded, we push a notification to the websockets.

    Note: address_version, address_hash, height and block_hash are part of the
    subscription callback signature but are not used here.
    """
    # decode the transaction
    transaction = bitcoin.deserialize(tx.encode("hex"))
    # get the amount (in satoshi) the user is expected to pay
    amount_to_pay = int(float(self.contract["buyer_order"]["order"]["payment"]["amount"]) * 100000000)
    if tx not in self.received_txs:  # make sure we aren't parsing the same tx twice.
        # p2sh scriptPubKey paying to the hash160 of our redeem script:
        # OP_HASH160 <20-byte hash> OP_EQUAL
        output_script = 'a914' + digest(unhexlify(
            self.contract["buyer_order"]["order"]["payment"]["redeem_script"])).encode("hex") + '87'
        for output in transaction["outs"]:
            if output["script"] == output_script:
                self.amount_funded += output["value"]
                if tx not in self.received_txs:
                    # inner guard: a tx with several matching outputs must
                    # only be recorded once (each output still adds value)
                    self.received_txs.append(tx)
        if self.amount_funded >= amount_to_pay:  # if fully funded
            self.timeout.cancel()
            self.blockchain.unsubscribe_address(
                self.contract["buyer_order"]["order"]["payment"]["address"], self.on_tx_received)
            order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
            if self.is_purchase:
                message_json = {
                    "payment_received": {
                        "address": self.contract["buyer_order"]["order"]["payment"]["address"],
                        "order_id": order_id
                    }
                }
                # update the db (1 == funded)
                self.db.Purchases().update_status(order_id, 1)
                self.log.info("Payment for order id %s successfully broadcast to network." % order_id)
            else:
                message_json = {
                    "new_order": {
                        "order_id": order_id,
                        "title": self.contract["vendor_offer"]["listing"]["item"]["title"]
                    }
                }
                self.db.Sales().update_status(order_id, 1)
                self.log.info("Received new order %s" % order_id)
            # push the message over websockets
            self.ws.push(json.dumps(message_json, indent=4))
def get_contract_id(self):
    """Return the digest of the serialized contract, used as its order/contract id."""
    return digest(json.dumps(self.contract, indent=4))
def delete(self, delete_images=True):
    """
    Deletes the contract json from the OpenBazaar directory as well as the listing
    metadata from the db and all the related images in the file system.

    Args:
        delete_images: when True, also remove the listing's images from disk
            and their pointers from the HashMap.
    """
    # build the file_name from the contract title, same sanitization as save()
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    h = self.db.HashMap()
    # maybe delete the images from disk
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"] and delete_images:
        for image_hash in self.contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
            # BUGFIX: get_file returns None when the hash has no mapping
            # (see __init__), and os.path.exists(None) raises TypeError
            image_path = h.get_file(unhexlify(image_hash))
            if image_path and os.path.exists(image_path):
                os.remove(image_path)
            # remove pointer to the image from the HashMap
            h.delete(unhexlify(image_hash))
    # delete the contract from disk
    if os.path.exists(file_path):
        os.remove(file_path)
    # delete the listing metadata from the db
    contract_hash = digest(json.dumps(self.contract, indent=4))
    self.db.ListingsStore().delete_listing(contract_hash)
    # remove the pointer to the contract from the HashMap
    h.delete(contract_hash)
def save(self):
    """
    Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
    It uses the title as the file name so it's easy on human eyes. A mapping of the
    hash of the contract and file path is stored in the database so we can retrieve
    the contract with only its hash.

    Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
    is saved in the db for fast access.
    """
    # get the contract title to use as the file name and format it
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)
    file_name = re.sub(r"\s+", '_', file_name)
    # serialize once so the bytes written to disk and the stored hash are
    # guaranteed to agree (previously serialized twice)
    contract_json = json.dumps(self.contract, indent=4)
    # save the json contract to the file system
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(contract_json)
    # Create a `ListingMetadata` protobuf object using data from the full contract
    listings = Listings()
    data = listings.ListingMetadata()
    data.contract_hash = digest(contract_json)
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    data.title = vendor_item["title"]
    if "image_hashes" in vendor_item:
        # first image doubles as the listing thumbnail
        data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
    if "category" in vendor_item:
        data.category = vendor_item["category"]
    if "bitcoin" not in vendor_item["price_per_unit"]:
        data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
        data.currency_code = vendor_item["price_per_unit"]["fiat"][
            "currency_code"]
    else:
        data.price = float(vendor_item["price_per_unit"]["bitcoin"])
        data.currency_code = "BTC"
    data.nsfw = vendor_item["nsfw"]
    # listings without shipping info (e.g. digital goods) get "NA" as origin
    if "shipping" not in self.contract["vendor_offer"]["listing"]:
        data.origin = CountryCode.Value("NA")
    else:
        data.origin = CountryCode.Value(
            self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
        for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
            data.ships_to.append(CountryCode.Value(region.upper()))
    # save the mapping of the contract file path and contract hash in the database
    self.db.HashMap().insert(data.contract_hash, file_path)
    # save the `ListingMetadata` protobuf to the database as well
    self.db.ListingsStore().add_listing(data)
def verify(self, sender_key):
    """
    Validate that an order sent over by a buyer is filled out correctly.

    Args:
        sender_key: the buyer's verify key (raw bytes) used to check the
            order signature.

    Returns:
        True when every check passes, False on any failure (all exceptions
        are caught and converted to False).
    """
    try:
        # recompute the contract hash WITHOUT the buyer_order attached; this
        # is what the buyer hashed when creating the order
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["buyer_order"]
        contract_hash = digest(json.dumps(contract_dict, indent=4))
        ref_hash = unhexlify(self.contract["buyer_order"]["order"]["ref_hash"])
        # verify that the reference hash matches the contract and that the contract actually exists
        if contract_hash != ref_hash or not self.db.HashMap().get_file(ref_hash):
            raise Exception("Order for contract that doesn't exist")
        # verify the signature on the order
        verify_key = nacl.signing.VerifyKey(sender_key)
        verify_key.verify(json.dumps(self.contract["buyer_order"]["order"], indent=4),
                          unhexlify(self.contract["buyer_order"]["signature"]))
        # verify buyer included the correct bitcoin amount for payment
        price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
        if "bitcoin" in price_json:
            asking_price = price_json["bitcoin"]
        else:
            # fiat listing: convert at the current exchange rate
            currency_code = price_json["fiat"]["currency_code"]
            fiat_price = price_json["fiat"]["price"]
            request = Request('https://api.bitcoinaverage.com/ticker/' + currency_code.upper() + '/last')
            response = urlopen(request)
            conversion_rate = response.read()
            asking_price = float("{0:.8f}".format(float(fiat_price) / float(conversion_rate)))
        if asking_price > self.contract["buyer_order"]["order"]["payment"]["amount"]:
            raise Exception("Insuffient Payment")
        # verify a valid moderator was selected
        # TODO: handle direct payments
        valid_mod = False
        for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
            if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
                valid_mod = True
        if not valid_mod:
            raise Exception("Invalid moderator")
        # verify all the shipping fields exist
        if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
            shipping = self.contract["buyer_order"]["order"]["shipping"]
            keys = ["ship_to", "address", "postal_code", "city", "state", "country"]
            for value in map(shipping.get, keys):
                if value is None:
                    raise Exception("Missing shipping field")
        # verify buyer ID
        pubkeys = self.contract["buyer_order"]["order"]["id"]["pubkeys"]
        keys = ["guid", "bitcoin", "encryption"]
        for value in map(pubkeys.get, keys):
            if value is None:
                raise Exception("Missing pubkey field")
        # verify redeem script: rebuild the 2-of-3 multisig from the order's
        # chaincode and check it matches what the buyer sent
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
            if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
                # the moderator check above guarantees a match, so masterkey_m
                # is always bound before use below
                masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
        masterkey_v = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
        masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = derive_childkey(masterkey_b, chaincode)
        vendor_key = derive_childkey(masterkey_v, chaincode)
        moderator_key = derive_childkey(masterkey_m, chaincode)
        redeem_script = '75' + bitcoin.mk_multisig_script([buyer_key, vendor_key, moderator_key], 2)
        if redeem_script != self.contract["buyer_order"]["order"]["payment"]["redeem_script"]:
            raise Exception("Invalid redeem script")
        # verify the payment address
        if self.testnet:
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script, 196)
        else:
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script)
        if payment_address != self.contract["buyer_order"]["order"]["payment"]["address"]:
            raise Exception("Incorrect payment address")
        return True
    except Exception:
        return False
| {
"repo_name": "melpomene/OpenBazaar-Server",
"path": "market/contracts.py",
"copies": "3",
"size": "35443",
"license": "mit",
"hash": -1721840956126011400,
"line_mean": 48.4323570432,
"line_max": 109,
"alpha_frac": 0.5394012922,
"autogenerated": false,
"ratio": 4.499555668401676,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002064031595575888,
"num_lines": 717
} |
__author__ = 'chris'
import json
import random
import time
import pickle
import nacl.signing
import bitcoin
from hashlib import sha256
from binascii import unhexlify, hexlify
from collections import OrderedDict
from urllib2 import Request, urlopen, URLError
from market.utils import deserialize
import re
import os
import nacl.encoding
from protos.objects import Listings
from protos.countries import CountryCode
from dht.utils import digest
from constants import DATA_FOLDER
from market.profile import Profile
from keyutils.keys import KeyChain
from keyutils.bip32utils import derive_childkey
from log import Logger
from constants import TRANSACTION_FEE
class Contract(object):
"""
A class for creating and interacting with OpenBazaar Ricardian contracts.
"""
def __init__(self, database, contract=None, hash_value=None, testnet=False):
    """
    This class can be instantiated with either an `OrderedDict` or a hash
    of a contract. If a hash is used, we will load the contract from either
    the file system or cache.

    Alternatively, pass in no parameters if the intent is to create a new
    contract.

    Args:
        database: project database object providing HashMap and the other stores.
        contract: an `OrderedDict` containing a filled out json contract
        hash_value: a hash160 (in hex) of a contract
        testnet: is this contract on the testnet
    """
    self.db = database
    self.keychain = KeyChain(self.db)
    if contract is not None:
        self.contract = contract
    elif hash_value is not None:
        try:
            # prefer the HashMap pointer; fall back to the cache directory
            # when the hash has no file mapping
            file_path = self.db.HashMap().get_file(hash_value)
            if file_path is None:
                file_path = DATA_FOLDER + "cache/" + hexlify(hash_value)
            with open(file_path, 'r') as filename:
                self.contract = json.load(filename, object_pairs_hook=OrderedDict)
        except Exception:
            try:
                # not in the store or cache; it may be an in-progress purchase
                file_path = DATA_FOLDER + "purchases/in progress/" + hexlify(hash_value) + ".json"
                with open(file_path, 'r') as filename:
                    self.contract = json.load(filename, object_pairs_hook=OrderedDict)
            except Exception:
                # nothing found anywhere: fall back to an empty contract
                # rather than raising
                self.contract = {}
    else:
        self.contract = {}
    self.log = Logger(system=self)
    # used when purchasing this contract
    self.testnet = testnet
    self.notification_listener = None
    self.blockchain = None  # libbitcoin client, assigned by the funding flow
    self.amount_funded = 0  # running total of value received at the payment address
    self.received_txs = []  # txs already counted toward the funding total
    self.is_purchase = False
    self.outpoints = []
def create(self,
           expiration_date,
           metadata_category,
           title,
           description,
           currency_code,
           price,
           process_time,
           nsfw,
           shipping_origin=None,
           shipping_regions=None,
           est_delivery_domestic=None,
           est_delivery_international=None,
           terms_conditions=None,
           returns=None,
           keywords=None,
           category=None,
           condition=None,
           sku=None,
           images=None,
           free_shipping=None,
           shipping_currency_code=None,
           shipping_domestic=None,
           shipping_international=None,
           options=None,
           moderators=None):
    """
    Build a new listing contract from the given fields, sign it, and save it.

    All parameters are strings except:

    :param expiration_date: `string` (must be formatted UTC datetime)
    :param keywords: `list`
    :param nsfw: `boolean`
    :param images: a `list` of image files
    :param free_shipping: `boolean`
    :param shipping_origin: a 'string' formatted `CountryCode`
    :param shipping_regions: a 'list' of 'string' formatted `CountryCode`s
    :param options: a 'dict' containing options as keys and 'list' as option values.
    :param moderators: a 'list' of 'string' guids (hex encoded).

    Raises:
        Exception: if any image hash is not 40 hex characters.
    """
    profile = Profile(self.db).get()
    # OrderedDict so the serialization (and therefore the signature and the
    # contract hash) is deterministic
    self.contract = OrderedDict(
        {
            "vendor_offer": {
                "listing": {
                    "metadata": {
                        "version": "0.1",
                        "category": metadata_category.lower(),
                        "category_sub": "fixed price"
                    },
                    "id": {
                        "guid": self.keychain.guid.encode("hex"),
                        "pubkeys": {
                            "guid": self.keychain.guid_signed_pubkey[64:].encode("hex"),
                            "bitcoin": bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey),
                            "encryption": self.keychain.encryption_pubkey.encode("hex")
                        }
                    },
                    "item": {
                        "title": title,
                        "description": description,
                        "process_time": process_time,
                        "price_per_unit": {},
                        "nsfw": nsfw
                    }
                }
            }
        }
    )
    if expiration_date == "":
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = "never"
    else:
        self.contract["vendor_offer"]["listing"]["metadata"]["expiry"] = expiration_date + " UTC"
    if metadata_category == "physical good" and condition is not None:
        self.contract["vendor_offer"]["listing"]["item"]["condition"] = condition
    if currency_code.upper() == "BTC":
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["bitcoin"] = round(float(price), 8)
    else:
        item = self.contract["vendor_offer"]["listing"]["item"]
        item["price_per_unit"]["fiat"] = {}
        item["price_per_unit"]["fiat"]["price"] = price
        item["price_per_unit"]["fiat"]["currency_code"] = currency_code
    if keywords is not None:
        self.contract["vendor_offer"]["listing"]["item"]["keywords"] = []
        self.contract["vendor_offer"]["listing"]["item"]["keywords"].extend(keywords)
    if category is not None:
        self.contract["vendor_offer"]["listing"]["item"]["category"] = category
    if sku is not None:
        self.contract["vendor_offer"]["listing"]["item"]["sku"] = sku
    if options is not None:
        self.contract["vendor_offer"]["listing"]["item"]["options"] = options
    if metadata_category == "physical good":
        self.contract["vendor_offer"]["listing"]["shipping"] = {}
        shipping = self.contract["vendor_offer"]["listing"]["shipping"]
        shipping["shipping_origin"] = shipping_origin
        # NOTE: `is False` means free_shipping=None falls through to the
        # free branch below
        if free_shipping is False:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = False
            self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"] = {}
            if shipping_currency_code == "BTC":
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"] = {}
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "domestic"] = shipping_domestic
                self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["bitcoin"][
                    "international"] = shipping_international
            else:
                shipping = self.contract["vendor_offer"]["listing"]["shipping"]
                shipping["flat_fee"]["fiat"] = {}
                shipping["flat_fee"]["fiat"]["price"] = {}
                shipping["flat_fee"]["fiat"]["price"][
                    "domestic"] = shipping_domestic
                shipping["flat_fee"]["fiat"]["price"][
                    "international"] = shipping_international
                shipping["flat_fee"]["fiat"][
                    "currency_code"] = shipping_currency_code
        else:
            self.contract["vendor_offer"]["listing"]["shipping"]["free"] = True
        self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"] = []
        for region in shipping_regions:
            shipping = self.contract["vendor_offer"]["listing"]["shipping"]
            shipping["shipping_regions"].append(region)
        listing = self.contract["vendor_offer"]["listing"]
        listing["shipping"]["est_delivery"] = {}
        listing["shipping"]["est_delivery"]["domestic"] = est_delivery_domestic
        listing["shipping"]["est_delivery"][
            "international"] = est_delivery_international
    if profile.HasField("handle"):
        self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] = profile.handle
    if images is not None:
        self.contract["vendor_offer"]["listing"]["item"]["image_hashes"] = []
        for image_hash in images:
            # hashes are hex-encoded hash160s: exactly 40 characters
            if len(image_hash) != 40:
                raise Exception("Invalid image hash")
            self.contract["vendor_offer"]["listing"]["item"]["image_hashes"].append(image_hash)
    if terms_conditions is not None or returns is not None:
        self.contract["vendor_offer"]["listing"]["policy"] = {}
        if terms_conditions is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["terms_conditions"] = terms_conditions
        if returns is not None:
            self.contract["vendor_offer"]["listing"]["policy"]["returns"] = returns
    if moderators is not None:
        self.contract["vendor_offer"]["listing"]["moderators"] = []
        for mod in moderators:
            mod_info = self.db.ModeratorStore().get_moderator(mod)
            # silently skip moderators we have no record of
            if mod_info is not None:
                moderator = {
                    "guid": mod,
                    "name": mod_info[7],
                    "avatar": mod_info[9].encode("hex"),
                    "short_description": mod_info[8],
                    "fee": str(mod_info[10]) + "%",
                    "blockchain_id": mod_info[6],
                    "pubkeys": {
                        "signing": {
                            # mod_info[1] packs signature (first 64 bytes)
                            # and key (remainder) together
                            "key": mod_info[1][64:].encode("hex"),
                            "signature": mod_info[1][:64].encode("hex")
                        },
                        "encryption": {
                            "key": mod_info[2].encode("hex"),
                            "signature": mod_info[3].encode("hex")
                        },
                        "bitcoin": {
                            "key": mod_info[4].encode("hex"),
                            "signature": mod_info[5].encode("hex")
                        }
                    }
                }
                self.contract["vendor_offer"]["listing"]["moderators"].append(moderator)
    # sign the listing portion and persist the finished contract
    listing = json.dumps(self.contract["vendor_offer"]["listing"], indent=4)
    self.contract["vendor_offer"]["signature"] = \
        self.keychain.signing_key.sign(listing, encoder=nacl.encoding.HexEncoder)[:128]
    self.save()
def add_purchase_info(self,
                      quantity,
                      ship_to=None,
                      shipping_address=None,
                      city=None,
                      state=None,
                      postal_code=None,
                      country=None,
                      moderator=None,
                      options=None):
    """
    Update the contract with the buyer's purchase information.

    Args:
        quantity: number of units purchased; the payment amount scales with it.
        ship_to / shipping_address / city / state / postal_code / country:
            shipping fields for "physical good" listings.
        moderator: hex guid of the chosen moderator, or falsy for a direct
            payment to the vendor.
        options: optional dict of selected listing options.

    Returns:
        A (payment_address, amount) tuple on success, or False when the
        moderator is invalid or an exchange-rate lookup fails.
    """
    profile = Profile(self.db).get()
    order_json = {
        "buyer_order": {
            "order": {
                # hash of the contract as it stood before the order was added
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "quantity": quantity,
                "id": {
                    "guid": self.keychain.guid.encode("hex"),
                    "pubkeys": {
                        "guid": self.keychain.guid_signed_pubkey[64:].encode("hex"),
                        "bitcoin": bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey),
                        "encryption": self.keychain.encryption_pubkey.encode("hex")
                    }
                },
                "payment": {}
            }
        }
    }
    if profile.HasField("handle"):
        order_json["buyer_order"]["order"]["id"]["blockchain_id"] = profile.handle
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        order_json["buyer_order"]["order"]["shipping"] = {}
        order_json["buyer_order"]["order"]["shipping"]["ship_to"] = ship_to
        order_json["buyer_order"]["order"]["shipping"]["address"] = shipping_address
        order_json["buyer_order"]["order"]["shipping"]["city"] = city
        order_json["buyer_order"]["order"]["shipping"]["state"] = state
        order_json["buyer_order"]["order"]["shipping"]["postal_code"] = postal_code
        order_json["buyer_order"]["order"]["shipping"]["country"] = country
    if options is not None:
        order_json["buyer_order"]["order"]["options"] = options
    if moderator:
        # moderated sale: build a 2-of-3 multisig from keys derived off a
        # fresh random chaincode
        chaincode = sha256(str(random.getrandbits(256))).digest().encode("hex")
        order_json["buyer_order"]["order"]["payment"]["chaincode"] = chaincode
        valid_mod = False
        for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
            if mod["guid"] == moderator:
                order_json["buyer_order"]["order"]["moderator"] = moderator
                masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
                valid_mod = True
        if not valid_mod:
            return False
        masterkey_b = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = derive_childkey(masterkey_b, chaincode)
        vendor_key = derive_childkey(masterkey_v, chaincode)
        moderator_key = derive_childkey(masterkey_m, chaincode)
        redeem_script = bitcoin.mk_multisig_script([buyer_key, vendor_key, moderator_key], 2)
        order_json["buyer_order"]["order"]["payment"]["redeem_script"] = redeem_script
        if self.testnet:
            # 196 is the testnet p2sh version byte
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script, 196)
        else:
            payment_address = bitcoin.p2sh_scriptaddr(redeem_script)
        order_json["buyer_order"]["order"]["payment"]["address"] = payment_address
    else:
        # direct payment: pay to a child key derived from the vendor's
        # master bitcoin key
        chaincode = sha256(str(random.getrandbits(256))).digest().encode("hex")
        order_json["buyer_order"]["order"]["payment"]["chaincode"] = chaincode
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        vendor_key = derive_childkey(masterkey_v, chaincode)
        if self.testnet:
            # 111 is the testnet p2pkh version byte
            payment_address = bitcoin.pubkey_to_address(vendor_key, 111)
        else:
            payment_address = bitcoin.pubkey_to_address(vendor_key)
        order_json["buyer_order"]["order"]["payment"]["address"] = payment_address
    price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
    if "bitcoin" in price_json:
        amount_to_pay = float(price_json["bitcoin"]) * quantity
    else:
        # fiat listing: convert to BTC at the current exchange rate
        currency_code = price_json["fiat"]["currency_code"]
        fiat_price = price_json["fiat"]["price"]
        try:
            request = Request('https://api.bitcoinaverage.com/ticker/' + currency_code.upper() + '/last')
            response = urlopen(request)
            conversion_rate = response.read()
        except URLError:
            return False
        amount_to_pay = float("{0:.8f}".format(float(fiat_price) / float(conversion_rate))) * quantity
    # add flat-fee shipping when the listing charges for it
    if "shipping" in self.contract["vendor_offer"]["listing"]:
        if not self.contract["vendor_offer"]["listing"]["shipping"]["free"]:
            shipping_origin = str(self.contract["vendor_offer"]["listing"]["shipping"][
                "shipping_origin"].upper())
            if shipping_origin == country.upper():
                # domestic rate
                if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                    shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "bitcoin"]["domestic"]) * quantity
                else:
                    price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                        "price"]["domestic"]
                    currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "fiat"]["currency_code"]
                    try:
                        request = Request('https://api.bitcoinaverage.com/ticker/' + currency.upper() + '/last')
                        response = urlopen(request)
                        conversion_rate = response.read()
                    except URLError:
                        return False
                    shipping_amount = round(
                        float("{0:.8f}".format(float(price) / float(conversion_rate))) * quantity, 8)
            else:
                # international rate
                if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                    shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "bitcoin"]["international"]) * quantity
                else:
                    price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                        "price"]["international"]
                    currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                        "fiat"]["currency_code"]
                    try:
                        request = Request('https://api.bitcoinaverage.com/ticker/' + currency.upper() + '/last')
                        response = urlopen(request)
                        conversion_rate = response.read()
                    except URLError:
                        return False
                    shipping_amount = round(
                        float("{0:.8f}".format(float(price) / float(conversion_rate))) * quantity, 8)
            amount_to_pay += shipping_amount
    order_json["buyer_order"]["order"]["payment"]["amount"] = amount_to_pay
    self.contract["buyer_order"] = order_json["buyer_order"]
    order = json.dumps(self.contract["buyer_order"]["order"], indent=4)
    # TODO: This should also be signed with the bitcoin key. It's the only way a moderator
    # will have to link this contract to a bitcoin transaction.
    self.contract["buyer_order"]["signature"] = \
        self.keychain.signing_key.sign(order, encoder=nacl.encoding.HexEncoder)[:128]
    return (self.contract["buyer_order"]["order"]["payment"]["address"],
            order_json["buyer_order"]["order"]["payment"]["amount"])
def add_order_confirmation(self,
                           libbitcoin_client,
                           payout_address,
                           comments=None,
                           shipper=None,
                           tracking_number=None,
                           est_delivery=None,
                           url=None,
                           password=None):
    """
    Add the vendor's order confirmation to the contract.

    Builds the ``vendor_order_confirmation`` section, signs it with the
    vendor's guid key, and then either pre-signs the escrow payout inputs
    (moderated order) or signs and broadcasts the direct-payment payout
    transaction immediately.

    Args:
        libbitcoin_client: connected libbitcoin client used to broadcast.
        payout_address: bitcoin address the vendor wants the funds sent to.
        comments: optional free-form comments for the buyer.
        shipper: shipping company name (physical goods only).
        tracking_number: shipment tracking number (physical goods only).
        est_delivery: estimated delivery time (physical goods only).
        url: content location (digital goods only).
        password: content access password (digital goods only).

    Raises:
        Exception: if payout_address fails the network-prefix or
            base58check validation.
    """
    self.blockchain = libbitcoin_client
    # sanity-check the address prefix against the active network:
    # "1"/"3" are mainnet, "n"/"m"/"2" are testnet
    if not self.testnet and not (payout_address[:1] == "1" or payout_address[:1] == "3"):
        raise Exception("Bitcoin address is not a mainnet address")
    elif self.testnet and not \
            (payout_address[:1] == "n" or payout_address[:1] == "m" or payout_address[:1] == "2"):
        raise Exception("Bitcoin address is not a testnet address")
    try:
        # base58check decode validates the address checksum
        bitcoin.b58check_to_hex(payout_address)
    except AssertionError:
        raise Exception("Invalid Bitcoin address")
    conf_json = {
        "vendor_order_confirmation": {
            "invoice": {
                # ref_hash ties this confirmation to the current contract state
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex")
            }
        }
    }
    if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
        shipping = {"shipper": shipper, "tracking_number": tracking_number, "est_delivery": est_delivery}
        conf_json["vendor_order_confirmation"]["invoice"]["shipping"] = shipping
    elif self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "digital good":
        content_source = {"url": url, "password": password}
        conf_json["vendor_order_confirmation"]["invoice"]["content_source"] = content_source
    if comments:
        conf_json["vendor_order_confirmation"]["invoice"]["comments"] = comments
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    # apply signatures
    outpoints = pickle.loads(self.db.Sales().get_outpoint(order_id))
    if "moderator" in self.contract["buyer_order"]["order"]:
        # moderated order: funds sit in a 2-of-3 multisig. Sign each input
        # with the vendor key and embed the signatures in the invoice so the
        # buyer can countersign and complete the payout.
        redeem_script = self.contract["buyer_order"]["order"]["payment"]["redeem_script"]
        value = 0
        for output in outpoints:
            value += output["value"]
            del output["value"]  # mktx expects outpoints without the value key
        value -= TRANSACTION_FEE
        outs = [{'value': value, 'address': payout_address}]
        tx = bitcoin.mktx(outpoints, outs)
        signatures = []
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_v = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        vendor_priv = derive_childkey(masterkey_v, chaincode, bitcoin.MAINNET_PRIVATE)
        for index in range(0, len(outpoints)):
            sig = bitcoin.multisign(tx, index, redeem_script, vendor_priv)
            signatures.append({"input_index": index, "signature": sig})
        conf_json["vendor_order_confirmation"]["invoice"]["payout"] = {}
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["address"] = payout_address
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["value"] = value
        conf_json["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"] = signatures
    else:
        # direct sale: the vendor alone controls the funding outputs, so the
        # payout transaction can be signed and broadcast right away
        value = 0
        for output in outpoints:
            value += output["value"]
            del output["value"]
        value -= TRANSACTION_FEE
        outs = [{'value': value, 'address': payout_address}]
        tx = bitcoin.mktx(outpoints, outs)
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_v = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        vendor_priv = derive_childkey(masterkey_v, chaincode, bitcoin.MAINNET_PRIVATE)
        for index in range(0, len(outpoints)):
            tx = bitcoin.sign(tx, index, vendor_priv)
        self.blockchain.broadcast(tx)
        self.db.Sales().update_payment_tx(order_id, bitcoin.txhash(tx))
    confirmation = json.dumps(conf_json["vendor_order_confirmation"]["invoice"], indent=4)
    # sign the invoice with the guid key; the first 128 hex chars of the
    # hex-encoded signed message are the detached signature
    conf_json["vendor_order_confirmation"]["signature"] = \
        self.keychain.signing_key.sign(confirmation, encoder=nacl.encoding.HexEncoder)[:128]
    self.contract["vendor_order_confirmation"] = conf_json["vendor_order_confirmation"]
    self.db.Sales().update_status(order_id, 2)
    # persist the updated contract over the in-progress copy
    file_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
def accept_order_confirmation(self, notification_listener, confirmation_json=None):
    """
    Validate the order confirmation sent over from the seller and update our node accordingly.

    Args:
        notification_listener: listener used to notify the user of the
            confirmation.
        confirmation_json: optional serialized confirmation; when given it
            is merged into the stored contract before validation.

    Returns:
        The order id (contract hash hex) on success, False on any failure.
    """
    self.notification_listener = notification_listener
    try:
        if confirmation_json:
            self.contract["vendor_order_confirmation"] = json.loads(confirmation_json,
                                                                    object_pairs_hook=OrderedDict)
        # recompute the contract hash *without* the confirmation section and
        # check it matches the ref_hash the vendor embedded
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["vendor_order_confirmation"]
        contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
        ref_hash = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
        if ref_hash != contract_hash:
            raise Exception("Order number doesn't match")
        # physical goods must come with shipping details
        if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
            shipping = self.contract["vendor_order_confirmation"]["invoice"]["shipping"]
            if "tracking_number" not in shipping or "shipper" not in shipping:
                raise Exception("No shipping information")
        # TODO: verify signature
        # TODO: verify payout object
        # update the order status in the db
        self.db.Purchases().update_status(contract_hash, 2)
        file_path = DATA_FOLDER + "purchases/in progress/" + contract_hash + ".json"
        # update the contract in the file system
        with open(file_path, 'w') as outfile:
            outfile.write(json.dumps(self.contract, indent=4))
        title = self.contract["vendor_offer"]["listing"]["item"]["title"]
        if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
            image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
        else:
            image_hash = ""
        # prefer the human-readable blockchain handle when the vendor has one
        if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"]:
            handle = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
        else:
            handle = ""
        vendor_guid = self.contract["vendor_offer"]["listing"]["id"]["guid"]
        self.notification_listener.notify(vendor_guid, handle, "order confirmation", contract_hash, title,
                                          image_hash)
        return contract_hash
    except Exception:
        # any validation or I/O failure is reported to the caller as False
        return False
def add_receipt(self,
                received,
                libbitcoin_client,
                feedback=None,
                quality=None,
                description=None,
                delivery_time=None,
                customer_service=None,
                review="",
                dispute=False,
                claim=None,
                payout=True):
    """
    Add the final piece of the contract that appends the review and payout transaction.

    For moderated orders this countersigns the vendor's payout inputs with
    the buyer key and, when every vendor signature verifies, broadcasts the
    completed 2-of-3 payout transaction.

    Args:
        received: whether the buyer received the item.
        libbitcoin_client: connected libbitcoin client used to broadcast.
        feedback, quality, description, delivery_time, customer_service:
            rating fields; the rating block is included only when all five
            are provided.
        review: optional free-form review text.
        dispute: True to flag the order as disputed.
        claim: optional dispute claim text.
        payout: set False to skip signing/broadcasting the payout.
    """
    self.blockchain = libbitcoin_client
    receipt_json = {
        "buyer_receipt": {
            "receipt": {
                # ref_hash ties this receipt to the current contract state
                "ref_hash": digest(json.dumps(self.contract, indent=4)).encode("hex"),
                "listing": {
                    "received": received,
                    "listing_hash": self.contract["buyer_order"]["order"]["ref_hash"]
                },
                "dispute": {
                    "dispute": dispute
                }
            }
        }
    }
    # include the rating only if every rating field was supplied
    if None not in (feedback, quality, description, delivery_time, customer_service):
        receipt_json["buyer_receipt"]["receipt"]["rating"] = {}
        receipt_json["buyer_receipt"]["receipt"]["rating"]["feedback"] = feedback
        receipt_json["buyer_receipt"]["receipt"]["rating"]["quality"] = quality
        receipt_json["buyer_receipt"]["receipt"]["rating"]["description"] = description
        receipt_json["buyer_receipt"]["receipt"]["rating"]["delivery_time"] = delivery_time
        receipt_json["buyer_receipt"]["receipt"]["rating"]["customer_service"] = customer_service
        receipt_json["buyer_receipt"]["receipt"]["rating"]["review"] = review
    order_id = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
    if payout and "moderator" in self.contract["buyer_order"]["order"]:
        # rebuild the payout tx the vendor proposed, countersign each input
        # with the buyer key, verify the vendor's signature, and combine both
        # into the final 2-of-3 multisig scriptSig
        outpoints = pickle.loads(self.db.Purchases().get_outpoint(order_id))
        payout_address = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["address"]
        redeem_script = str(self.contract["buyer_order"]["order"]["payment"]["redeem_script"])
        for output in outpoints:
            del output["value"]  # mktx expects outpoints without the value key
        value = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["value"]
        outs = [{'value': value, 'address': payout_address}]
        tx = bitcoin.mktx(outpoints, outs)
        signatures = []
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_privkey)
        buyer_priv = derive_childkey(masterkey_b, chaincode, bitcoin.MAINNET_PRIVATE)
        masterkey_v = self.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["bitcoin"]
        vendor_key = derive_childkey(masterkey_v, chaincode)
        valid_inputs = 0
        for index in range(0, len(outpoints)):
            sig = bitcoin.multisign(tx, index, redeem_script, buyer_priv)
            signatures.append({"input_index": index, "signature": sig})
            for s in self.contract["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"]:
                if s["input_index"] == index:
                    if bitcoin.verify_tx_input(tx, index, redeem_script, s["signature"], vendor_key):
                        # accumulate scriptSigs input by input
                        tx = bitcoin.apply_multisignatures(tx, index, str(redeem_script),
                                                           sig, str(s["signature"]))
                        valid_inputs += 1
        receipt_json["buyer_receipt"]["receipt"]["payout"] = {}
        # broadcast only if the vendor validly signed every input
        if valid_inputs == len(outpoints):
            self.log.info("Broadcasting payout tx %s to network" % bitcoin.txhash(tx))
            self.blockchain.broadcast(tx)
            receipt_json["buyer_receipt"]["receipt"]["payout"]["txid"] = bitcoin.txhash(tx)
        receipt_json["buyer_receipt"]["receipt"]["payout"]["signature(s)"] = signatures
        receipt_json["buyer_receipt"]["receipt"]["payout"]["value"] = value
    if claim:
        receipt_json["buyer_receipt"]["receipt"]["dispute"]["claim"] = claim
    receipt = json.dumps(receipt_json["buyer_receipt"]["receipt"], indent=4)
    # sign the receipt with the buyer's guid key; the first 128 hex chars of
    # the hex-encoded signed message are the detached signature
    receipt_json["buyer_receipt"]["signature"] = \
        self.keychain.signing_key.sign(receipt, encoder=nacl.encoding.HexEncoder)[:128]
    self.contract["buyer_receipt"] = receipt_json["buyer_receipt"]
    self.db.Purchases().update_status(order_id, 3)
    # move the contract from "in progress" to "trade receipts"
    file_path = DATA_FOLDER + "purchases/trade receipts/" + order_id + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    file_path = DATA_FOLDER + "purchases/in progress/" + order_id + ".json"
    if os.path.exists(file_path):
        os.remove(file_path)
def accept_receipt(self, notification_listener, blockchain, receipt_json=None):
    """
    Process the final receipt sent over by the buyer. If valid, broadcast the transaction
    to the bitcoin network.

    Args:
        notification_listener: listener used to notify the user.
        blockchain: connected libbitcoin client used to broadcast.
        receipt_json: optional serialized buyer receipt; when given it is
            merged into the stored contract before validation.

    Returns:
        The order id (confirmation ref_hash) on success.

    Raises:
        Exception: if the ref_hash doesn't match, the vendor confirmation
            was tampered with, or a buyer signature is invalid.
    """
    self.notification_listener = notification_listener
    self.blockchain = blockchain
    if receipt_json:
        self.contract["buyer_receipt"] = json.loads(receipt_json,
                                                    object_pairs_hook=OrderedDict)
    # recompute the contract hash *without* the receipt section; it must
    # equal the ref_hash the buyer signed over
    contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
    del contract_dict["buyer_receipt"]
    contract_hash = digest(json.dumps(contract_dict, indent=4)).encode("hex")
    ref_hash = self.contract["buyer_receipt"]["receipt"]["ref_hash"]
    if ref_hash != contract_hash:
        raise Exception("Order number doesn't match")
    # The buyer may have sent over this whole contract, make sure the data we added wasn't manipulated.
    verify_key = self.keychain.signing_key.verify_key
    verify_key.verify(json.dumps(self.contract["vendor_order_confirmation"]["invoice"], indent=4),
                      unhexlify(self.contract["vendor_order_confirmation"]["signature"]))
    # TODO: verify buyer signature
    order_id = self.contract["vendor_order_confirmation"]["invoice"]["ref_hash"]
    if "moderator" in self.contract["buyer_order"]["order"]:
        # moderated order: rebuild the payout tx and combine the vendor's and
        # buyer's signatures into the final 2-of-3 multisig spend
        outpoints = pickle.loads(self.db.Sales().get_outpoint(order_id))
        payout_address = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["address"]
        redeem_script = str(self.contract["buyer_order"]["order"]["payment"]["redeem_script"])
        for output in outpoints:
            del output["value"]  # mktx expects outpoints without the value key
        value = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["value"]
        outs = [{'value': value, 'address': payout_address}]
        tx = bitcoin.mktx(outpoints, outs)
        chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
        masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
        buyer_key = derive_childkey(masterkey_b, chaincode)
        vendor_sigs = self.contract["vendor_order_confirmation"]["invoice"]["payout"]["signature(s)"]
        buyer_sigs = self.contract["buyer_receipt"]["receipt"]["payout"]["signature(s)"]
        for index in range(0, len(outpoints)):
            # NOTE(review): assumes both parties provided a signature for
            # every input index; a missing one would raise NameError below.
            for s in vendor_sigs:
                if s["input_index"] == index:
                    sig2 = str(s["signature"])
            for s in buyer_sigs:
                if s["input_index"] == index:
                    sig1 = str(s["signature"])
            if bitcoin.verify_tx_input(tx, index, redeem_script, sig1, buyer_key):
                # BUGFIX: apply the signatures to the *accumulating* tx so
                # multi-input payouts keep earlier inputs' scriptSigs
                # (previously only the last input's scriptSig survived).
                # This mirrors the pattern used in add_receipt().
                tx = bitcoin.apply_multisignatures(tx, index, str(redeem_script), sig1, sig2)
            else:
                raise Exception("Buyer sent invalid signature")
        self.log.info("Broadcasting payout tx %s to network" % bitcoin.txhash(tx))
        self.blockchain.broadcast(tx)
        self.db.Sales().update_payment_tx(order_id, bitcoin.txhash(tx))
    title = self.contract["vendor_offer"]["listing"]["item"]["title"]
    if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
        image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
    else:
        image_hash = ""
    buyer_guid = self.contract["buyer_order"]["order"]["id"]["guid"]
    # prefer the buyer's human-readable handle when present
    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"]:
        handle = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
    else:
        handle = ""
    self.notification_listener.notify(buyer_guid, handle, "payment received", order_id, title, image_hash)
    self.db.Sales().update_status(order_id, 3)
    # move the contract into "trade receipts" and drop the in-progress copy
    file_path = DATA_FOLDER + "store/listings/trade receipts/" + order_id + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    file_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
    if os.path.exists(file_path):
        os.remove(file_path)
    return order_id
def await_funding(self, notification_listener, libbitcoin_client, proofSig, is_purchase=True):
    """
    Saves the contract to the file system and db as an unfunded contract.
    Listens on the libbitcoin server for the multisig address to be funded.

    Args:
        notification_listener: listener fired once the address is funded.
        libbitcoin_client: connected libbitcoin client used to subscribe.
        proofSig: signature stored alongside the purchase record
            (presumably the vendor's proof-of-order signature — confirm
            against the caller).
        is_purchase: True on the buyer side, False on the vendor side.
    """
    self.notification_listener = notification_listener
    self.blockchain = libbitcoin_client
    self.is_purchase = is_purchase
    order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
    payment_address = self.contract["buyer_order"]["order"]["payment"]["address"]
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    if "image_hashes" in vendor_item:
        thumbnail_hash = vendor_item["image_hashes"][0]
    else:
        thumbnail_hash = ""
    # prefer a non-empty human-readable blockchain handle over the raw guid
    if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"] \
            and self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"] != "":
        vendor = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
    else:
        vendor = self.contract["vendor_offer"]["listing"]["id"]["guid"]
    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"] \
            and self.contract["buyer_order"]["order"]["id"]["blockchain_id"] != "":
        buyer = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
    else:
        buyer = self.contract["buyer_order"]["order"]["id"]["guid"]
    if is_purchase:
        # buyer side: record the purchase as unfunded (status 0)
        file_path = DATA_FOLDER + "purchases/unfunded/" + order_id + ".json"
        self.db.Purchases().new_purchase(order_id,
                                         self.contract["vendor_offer"]["listing"]["item"]["title"],
                                         self.contract["vendor_offer"]["listing"]["item"]["description"],
                                         time.time(),
                                         self.contract["buyer_order"]["order"]["payment"]["amount"],
                                         payment_address,
                                         0,
                                         thumbnail_hash,
                                         vendor,
                                         proofSig,
                                         self.contract["vendor_offer"]["listing"]["metadata"]["category"])
    else:
        # vendor side: record the sale as unfunded (status 0)
        file_path = DATA_FOLDER + "store/listings/unfunded/" + order_id + ".json"
        self.db.Sales().new_sale(order_id,
                                 self.contract["vendor_offer"]["listing"]["item"]["title"],
                                 self.contract["vendor_offer"]["listing"]["item"]["description"],
                                 time.time(),
                                 self.contract["buyer_order"]["order"]["payment"]["amount"],
                                 payment_address,
                                 0,
                                 thumbnail_hash,
                                 buyer,
                                 self.contract["vendor_offer"]["listing"]["metadata"]["category"])
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # fire on_tx_received whenever a tx pays the funding address
    self.blockchain.subscribe_address(str(payment_address), notification_cb=self.on_tx_received)
def on_tx_received(self, address_version, address_hash, height, block_hash, tx):
    """
    Fire when the libbitcoin server tells us we received a payment to this funding address.
    While unlikely, a user may send multiple transactions to the funding address to reach the
    funding level. We need to keep a running balance and increment it when a new transaction
    is received. If the contract is fully funded, we push a notification to the websockets.

    Args:
        address_version, address_hash, height, block_hash: subscription
            metadata from the libbitcoin server (unused here; callers may
            pass None).
        tx: the raw transaction.
    """
    try:
        # decode the transaction
        self.log.info("Bitcoin transaction detected")
        transaction = deserialize(tx.encode("hex"))
        # get the amount (in satoshi) the user is expected to pay
        amount_to_pay = int(float(self.contract["buyer_order"]["order"]["payment"]["amount"]) * 100000000)
        if tx not in self.received_txs:  # make sure we aren't parsing the same tx twice.
            # expected output script: P2SH of the redeem script for moderated
            # (multisig) orders, P2PKH of the payment address otherwise
            if "moderator" in self.contract["buyer_order"]["order"]:
                output_script = 'a914' + digest(unhexlify(
                    self.contract["buyer_order"]["order"]["payment"]["redeem_script"])).encode("hex") + '87'
            else:
                output_script = '76a914' + bitcoin.b58check_to_hex(
                    self.contract["buyer_order"]["order"]["payment"]["address"]) +'88ac'
            for output in transaction["outs"]:
                if output["script"] == output_script:
                    # tally satoshis paid to our script and remember the
                    # outpoint for the eventual payout transaction
                    self.amount_funded += output["value"]
                    if tx not in self.received_txs:
                        self.received_txs.append(tx)
                    self.outpoints.append({"output": bitcoin.txhash(tx.encode("hex")) +
                                                     ":" + str(output["index"]), "value": output["value"]})
            if self.amount_funded >= amount_to_pay:  # if fully funded
                self.blockchain.unsubscribe_address(
                    self.contract["buyer_order"]["order"]["payment"]["address"], self.on_tx_received)
                order_id = digest(json.dumps(self.contract, indent=4)).encode("hex")
                title = self.contract["vendor_offer"]["listing"]["item"]["title"]
                if "image_hashes" in self.contract["vendor_offer"]["listing"]["item"]:
                    image_hash = unhexlify(self.contract["vendor_offer"]["listing"]["item"]["image_hashes"][0])
                else:
                    image_hash = ""
                if self.is_purchase:
                    # buyer side: notify the vendor and mark the purchase funded
                    unfunded_path = DATA_FOLDER + "purchases/unfunded/" + order_id + ".json"
                    in_progress_path = DATA_FOLDER + "purchases/in progress/" + order_id + ".json"
                    if "blockchain_id" in self.contract["vendor_offer"]["listing"]["id"]:
                        handle = self.contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
                    else:
                        handle = ""
                    vendor_guid = self.contract["vendor_offer"]["listing"]["id"]["guid"]
                    self.notification_listener.notify(unhexlify(vendor_guid), handle, "payment received",
                                                      order_id, title, image_hash)
                    # update the db
                    self.db.Purchases().update_status(order_id, 1)
                    self.db.Purchases().update_outpoint(order_id, pickle.dumps(self.outpoints))
                    self.log.info("Payment for order id %s successfully broadcast to network." % order_id)
                else:
                    # vendor side: a new funded order arrived
                    unfunded_path = DATA_FOLDER + "store/listings/unfunded/" + order_id + ".json"
                    in_progress_path = DATA_FOLDER + "store/listings/in progress/" + order_id + ".json"
                    buyer_guid = self.contract["buyer_order"]["order"]["id"]["guid"]
                    if "blockchain_id" in self.contract["buyer_order"]["order"]["id"]:
                        handle = self.contract["buyer_order"]["order"]["id"]["blockchain_id"]
                    else:
                        handle = ""
                    self.notification_listener.notify(unhexlify(buyer_guid), handle, "new order", order_id,
                                                      title, image_hash)
                    self.db.Sales().update_status(order_id, 1)
                    self.db.Sales().update_outpoint(order_id, pickle.dumps(self.outpoints))
                    self.log.info("Received new order %s" % order_id)
                # move the contract file out of "unfunded"
                os.rename(unfunded_path, in_progress_path)
    except Exception:
        # callback runs inside the reactor; never let it raise, just log
        self.log.critical("Error processing bitcoin transaction")
def get_contract_id(self):
    """Return the digest of the serialized contract, used as its unique id."""
    serialized_contract = json.dumps(self.contract, indent=4)
    return digest(serialized_contract)
def delete(self, delete_images=False):
    """
    Deletes the contract json from the OpenBazaar directory as well as the listing
    metadata from the db and all the related images in the file system.
    """
    hash_map = self.db.HashMap()
    contract_hash = digest(json.dumps(self.contract, indent=4))
    # locate the contract file via the hash -> path mapping
    file_path = hash_map.get_file(contract_hash.encode("hex"))
    # optionally remove the listing images from disk and the HashMap
    item = self.contract["vendor_offer"]["listing"]["item"]
    if delete_images and "image_hashes" in item:
        for image_hash in item["image_hashes"]:
            image_path = hash_map.get_file(image_hash)
            if os.path.exists(image_path):
                os.remove(image_path)
            hash_map.delete(image_hash)
    # remove the contract json itself
    if os.path.exists(file_path):
        os.remove(file_path)
    # drop the listing metadata from the db
    self.db.ListingsStore().delete_listing(contract_hash)
    # and finally drop the hash -> path pointer
    hash_map.delete(contract_hash.encode("hex"))
def save(self):
    """
    Saves the json contract into the OpenBazaar/store/listings/contracts/ directory.
    It uses the title as the file name so it's easy on human eyes. A mapping of the
    hash of the contract and file path is stored in the database so we can retrieve
    the contract with only its hash.

    Additionally, the contract metadata (sent in response to the GET_LISTINGS query)
    is saved in the db for fast access.
    """
    # get the contract title to use as the file name and format it
    file_name = str(self.contract["vendor_offer"]["listing"]["item"]["title"][:100])
    file_name = re.sub(r"[^\w\s]", '', file_name)  # strip punctuation
    file_name = re.sub(r"\s+", '_', file_name)  # collapse whitespace runs to "_"
    # append 8 hex chars of the contract hash to keep names collision-free
    file_name += digest(json.dumps(self.contract, indent=4)).encode("hex")[:8]
    # save the json contract to the file system
    file_path = DATA_FOLDER + "store/listings/contracts/" + file_name + ".json"
    with open(file_path, 'w') as outfile:
        outfile.write(json.dumps(self.contract, indent=4))
    # Create a `ListingMetadata` protobuf object using data from the full contract
    listings = Listings()
    data = listings.ListingMetadata()
    data.contract_hash = digest(json.dumps(self.contract, indent=4))
    vendor_item = self.contract["vendor_offer"]["listing"]["item"]
    data.title = vendor_item["title"]
    if "image_hashes" in vendor_item:
        data.thumbnail_hash = unhexlify(vendor_item["image_hashes"][0])
    if "category" in vendor_item:
        data.category = vendor_item["category"]
    # price is either fiat (with a currency code) or denominated in BTC
    if "bitcoin" not in vendor_item["price_per_unit"]:
        data.price = float(vendor_item["price_per_unit"]["fiat"]["price"])
        data.currency_code = vendor_item["price_per_unit"]["fiat"][
            "currency_code"]
    else:
        data.price = float(vendor_item["price_per_unit"]["bitcoin"])
        data.currency_code = "BTC"
    data.nsfw = vendor_item["nsfw"]
    # listings without a shipping section get the "NA" origin sentinel
    if "shipping" not in self.contract["vendor_offer"]["listing"]:
        data.origin = CountryCode.Value("NA")
    else:
        data.origin = CountryCode.Value(
            self.contract["vendor_offer"]["listing"]["shipping"]["shipping_origin"].upper())
        for region in self.contract["vendor_offer"]["listing"]["shipping"]["shipping_regions"]:
            data.ships_to.append(CountryCode.Value(region.upper()))
    # save the mapping of the contract file path and contract hash in the database
    self.db.HashMap().insert(data.contract_hash.encode("hex"), file_path)
    # save the `ListingMetadata` protobuf to the database as well
    self.db.ListingsStore().add_listing(data)
def verify(self, sender_key):
    """
    Validate that an order sent over by a buyer is filled out correctly.

    Checks, in order: the order references an existing contract, the buyer's
    signature over the order is valid, the payment amount covers the asking
    price plus shipping, the selected moderator and redeem script (or the
    direct payment address) are correct, and all required shipping and
    identity fields are present.

    Args:
        sender_key: the buyer's raw verify key used to check the order
            signature.

    Returns:
        True if the order validates, False on any failure (including
        network errors while fetching the fiat conversion rate).
    """
    try:
        # recompute the contract hash *without* the buyer_order section; it
        # must equal the order's ref_hash and be present in our HashMap
        contract_dict = json.loads(json.dumps(self.contract, indent=4), object_pairs_hook=OrderedDict)
        del contract_dict["buyer_order"]
        contract_hash = digest(json.dumps(contract_dict, indent=4))
        ref_hash = unhexlify(self.contract["buyer_order"]["order"]["ref_hash"])
        # verify that the reference hash matches the contract and that the contract actually exists
        if contract_hash != ref_hash or not self.db.HashMap().get_file(ref_hash.encode("hex")):
            raise Exception("Order for contract that doesn't exist")
        # verify the signature on the order
        verify_key = nacl.signing.VerifyKey(sender_key)
        verify_key.verify(json.dumps(self.contract["buyer_order"]["order"], indent=4),
                          unhexlify(self.contract["buyer_order"]["signature"]))
        # TODO: verify the bitcoin signature after we add it
        # verify buyer included the correct bitcoin amount for payment
        quantity = int(self.contract["buyer_order"]["order"]["quantity"])
        price_json = self.contract["vendor_offer"]["listing"]["item"]["price_per_unit"]
        if "bitcoin" in price_json:
            asking_price = float(price_json["bitcoin"]) * quantity
        else:
            # fiat price: convert to BTC with the current exchange rate
            currency_code = price_json["fiat"]["currency_code"]
            fiat_price = price_json["fiat"]["price"]
            request = Request('https://api.bitcoinaverage.com/ticker/' + currency_code.upper() + '/last')
            response = urlopen(request)
            conversion_rate = response.read()
            asking_price = float("{0:.8f}".format(float(fiat_price) / float(conversion_rate))) * quantity
        # add the flat shipping fee (domestic vs. international) when the
        # vendor charges for shipping
        if "shipping" in self.contract["vendor_offer"]["listing"]:
            if not self.contract["vendor_offer"]["listing"]["shipping"]["free"]:
                shipping_origin = self.contract["vendor_offer"]["listing"]["shipping"][
                    "shipping_origin"].upper()
                if shipping_origin == self.contract["buyer_order"]["order"]["shipping"]["country"].upper():
                    if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                        shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"][
                            "flat_fee"]["bitcoin"]["domestic"]) * quantity
                    else:
                        price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                            "price"]["domestic"]
                        currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                            "fiat"]["currency_code"]
                        try:
                            request = Request('https://api.bitcoinaverage.com/ticker/' +
                                              currency.upper() + '/last')
                            response = urlopen(request)
                            conversion_rate = response.read()
                        except URLError:
                            return False
                        shipping_amount = round(float("{0:.8f}".format(float(price) /
                                                                       float(conversion_rate))) * quantity, 8)
                else:
                    if "bitcoin" in self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]:
                        shipping_amount = float(self.contract["vendor_offer"]["listing"]["shipping"][
                            "flat_fee"]["bitcoin"]["international"]) * quantity
                    else:
                        price = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"]["fiat"][
                            "price"]["international"]
                        currency = self.contract["vendor_offer"]["listing"]["shipping"]["flat_fee"][
                            "fiat"]["currency_code"]
                        try:
                            request = Request('https://api.bitcoinaverage.com/ticker/' +
                                              currency.upper() + '/last')
                            response = urlopen(request)
                            conversion_rate = response.read()
                        except URLError:
                            return False
                        shipping_amount = round(float("{0:.8f}".format(float(price) /
                                                                       float(conversion_rate))) * quantity, 8)
                asking_price += shipping_amount
        if float(asking_price) > float(self.contract["buyer_order"]["order"]["payment"]["amount"]):
            # typo fixed: message previously read "Insuffient Payment"
            raise Exception("Insufficient Payment")
        if "moderator" in self.contract["buyer_order"]["order"]:
            # verify a valid moderator was selected
            valid_mod = False
            for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
                if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
                    valid_mod = True
            if not valid_mod:
                raise Exception("Invalid moderator")
            # verify redeem script: rebuild the 2-of-3 multisig from the
            # buyer, vendor and moderator child keys and compare
            chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
            for mod in self.contract["vendor_offer"]["listing"]["moderators"]:
                if mod["guid"] == self.contract["buyer_order"]["order"]["moderator"]:
                    masterkey_m = mod["pubkeys"]["bitcoin"]["key"]
            masterkey_b = self.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
            masterkey_v = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
            buyer_key = derive_childkey(masterkey_b, chaincode)
            vendor_key = derive_childkey(masterkey_v, chaincode)
            moderator_key = derive_childkey(masterkey_m, chaincode)
            redeem_script = bitcoin.mk_multisig_script([buyer_key, vendor_key, moderator_key], 2)
            if redeem_script != self.contract["buyer_order"]["order"]["payment"]["redeem_script"]:
                raise Exception("Invalid redeem script")
        else:
            # verify the direct payment address
            chaincode = self.contract["buyer_order"]["order"]["payment"]["chaincode"]
            masterkey_v = bitcoin.bip32_extract_key(self.keychain.bitcoin_master_pubkey)
            vendor_key = derive_childkey(masterkey_v, chaincode)
            # verify the payment address (111 is the testnet P2PKH version byte)
            if self.testnet:
                payment_address = bitcoin.pubkey_to_address(vendor_key, 111)
            else:
                payment_address = bitcoin.pubkey_to_address(vendor_key)
            if payment_address != self.contract["buyer_order"]["order"]["payment"]["address"]:
                raise Exception("Incorrect payment address")
        # verify all the shipping fields exist
        if self.contract["vendor_offer"]["listing"]["metadata"]["category"] == "physical good":
            shipping = self.contract["buyer_order"]["order"]["shipping"]
            keys = ["ship_to", "address", "postal_code", "city", "state", "country"]
            for value in map(shipping.get, keys):
                if value is None:
                    raise Exception("Missing shipping field")
        # verify buyer ID
        pubkeys = self.contract["buyer_order"]["order"]["id"]["pubkeys"]
        keys = ["guid", "bitcoin", "encryption"]
        for value in map(pubkeys.get, keys):
            if value is None:
                raise Exception("Missing pubkey field")
        return True
    except Exception:
        # callers only need pass/fail; all failure detail stays internal
        return False
def __repr__(self):
    """Render the full contract as pretty-printed JSON."""
    pretty = json.dumps(self.contract, indent=4)
    return pretty
def check_unfunded_for_payment(db, libbitcoin_client, notification_listener, testnet=False):
    """
    Run through the unfunded contracts in our database and query the
    libbitcoin server to see if they received a payment.

    Args:
        db: the application Database object.
        libbitcoin_client: connected libbitcoin client used for the queries.
        notification_listener: listener attached to each rebuilt Contract.
        testnet: True when running against the bitcoin testnet.
    """
    def check(order_ids, is_purchase=True):
        # rebuild each unfunded contract from disk and ask the server for
        # the history of its funding address
        for order_id in order_ids:
            try:
                if is_purchase:
                    file_path = DATA_FOLDER + "purchases/unfunded/" + order_id[0] + ".json"
                else:
                    file_path = DATA_FOLDER + "store/listings/unfunded/" + order_id[0] + ".json"
                with open(file_path, 'r') as filename:
                    order = json.load(filename, object_pairs_hook=OrderedDict)
                c = Contract(db, contract=order, testnet=testnet)
                c.blockchain = libbitcoin_client
                c.notification_listener = notification_listener
                c.is_purchase = is_purchase
                addr = c.contract["buyer_order"]["order"]["payment"]["address"]

                # BUGFIX: these callbacks fire asynchronously, *after* the
                # enclosing loops have advanced, so closing over the loop
                # variables (`c`, `txhash`, `cb_chain`) late-binds them to
                # the last iteration's values. Bind the current values as
                # default arguments instead of suppressing pylint W0640.
                def history_fetched(ec, history, c=c):
                    if not ec:
                        # pylint: disable=W0612
                        for objid, txhash, index, height, value in history:
                            def cb_chain(ec, result, c=c):
                                if not ec:
                                    c.on_tx_received(None, None, None, None, result)

                            def cb_txpool(ec, result, c=c, txhash=txhash, cb_chain=cb_chain):
                                if ec:
                                    # not in the mempool; fall back to the chain
                                    libbitcoin_client.fetch_transaction(txhash, cb_chain)
                                else:
                                    c.on_tx_received(None, None, None, None, result)
                            libbitcoin_client.fetch_txpool_transaction(txhash, cb_txpool)
                libbitcoin_client.fetch_history2(addr, history_fetched)
            except Exception:
                # best-effort sweep: a single corrupt contract must not stop
                # the remaining checks
                pass
    check(db.Purchases().get_unfunded(), True)
    check(db.Sales().get_unfunded(), False)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "market/contracts.py",
"copies": "1",
"size": "59414",
"license": "mit",
"hash": -2857226429407291400,
"line_mean": 53.3586459286,
"line_max": 116,
"alpha_frac": 0.5310532871,
"autogenerated": false,
"ratio": 4.561185321664364,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002744115831849682,
"num_lines": 1093
} |
__author__ = 'chris'
import json
import time
import random
from log import Logger
from api.utils import sanitize_html
from interfaces import MessageListener, BroadcastListener, NotificationListener
from zope.interface import implements
from protos.objects import PlaintextMessage, Following
from dht.utils import digest
class MessageListenerImpl(object):
    """
    `MessageListener` implementation: persists incoming plaintext chat
    messages to the database and pushes them to the UI over the websocket.
    """
    implements(MessageListener)

    def __init__(self, web_socket_factory, database):
        # web_socket_factory: object whose push() reaches the connected UI
        # database: db facade exposing messages/purchases/sales/cases stores
        self.ws = web_socket_factory
        self.db = database
        self.log = Logger(system=self)

    def notify(self, plaintext, signature):
        """
        Save the decrypted `PlaintextMessage` protobuf and, if the save
        succeeded, push a json rendering of it to the websocket.

        Args:
            plaintext: decrypted PlaintextMessage protobuf.
            signature: sender's signature over the message, stored verbatim.
        """
        try:
            success = self.db.messages.save_message(plaintext.sender_guid.encode("hex"),
                                                    plaintext.handle, plaintext.pubkey,
                                                    plaintext.subject, PlaintextMessage.Type.Name(plaintext.type),
                                                    plaintext.message, plaintext.timestamp, plaintext.avatar_hash,
                                                    signature, False)
            # a non-empty subject links the message to an order; flag the
            # matching order row(s) as having unread messages
            if plaintext.subject != "":
                self.db.purchases.update_unread(plaintext.subject)
                self.db.sales.update_unread(plaintext.subject)
                self.db.cases.update_unread(plaintext.subject)
            if success:
                message_json = {
                    "message": {
                        "sender": plaintext.sender_guid.encode("hex"),
                        "subject": plaintext.subject,
                        "message_type": PlaintextMessage.Type.Name(plaintext.type),
                        "message": plaintext.message,
                        "timestamp": plaintext.timestamp,
                        "avatar_hash": plaintext.avatar_hash.encode("hex"),
                        "public_key": plaintext.pubkey.encode("hex")
                    }
                }
                if plaintext.handle:
                    message_json["message"]["handle"] = plaintext.handle
                self.ws.push(json.dumps(sanitize_html(message_json), indent=4))
        except Exception as e:
            # a malformed message must never crash the listener; log and move on
            self.log.error('Market.Listener.notify Exception: %s' % e)
class BroadcastListenerImpl(object):
    """
    `BroadcastListener` implementation: persists broadcasts from followed
    nodes and pushes them to the UI over the websocket.
    """
    implements(BroadcastListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database

    def notify(self, guid, message):
        """Look up the sender's metadata, save the broadcast, and push it to the UI."""
        # pull the sender's handle/avatar from our following list, if present
        following = Following()
        handle = ""
        avatar_hash = ""
        serialized = self.db.follow.get_following()
        if serialized is not None:
            following.ParseFromString(serialized)
            for user in following.users:
                if user.guid == guid:
                    avatar_hash = user.metadata.avatar_hash
                    handle = user.metadata.handle
        created = int(time.time())
        # random 255-bit value hashed into a unique broadcast id
        broadcast_id = digest(random.getrandbits(255)).encode("hex")
        self.db.broadcasts.save_broadcast(broadcast_id, guid.encode("hex"), handle, message,
                                          created, avatar_hash)
        payload = {
            "broadcast": {
                "id": broadcast_id,
                "guid": guid.encode("hex"),
                "handle": handle,
                "message": message,
                "timestamp": created,
                "avatar_hash": avatar_hash.encode("hex")
            }
        }
        self.ws.push(json.dumps(sanitize_html(payload), indent=4))
class NotificationListenerImpl(object):
    """Stores notifications and forwards them to the UI websocket."""
    implements(NotificationListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database

    def notify(self, guid, handle, notif_type, order_id, title, image_hash):
        """Persist a notification and push it to the websocket."""
        timestamp = int(time.time())
        # random id keeps otherwise-identical notifications distinct
        notif_id = digest(random.getrandbits(255)).encode("hex")
        self.db.notifications.save_notification(notif_id, guid.encode("hex"), handle, notif_type,
                                                order_id, title, timestamp, image_hash)
        payload = {
            "notification": {
                "id": notif_id,
                "guid": guid.encode("hex"),
                "handle": handle,
                "type": notif_type,
                "order_id": order_id,
                "title": title,
                "timestamp": timestamp,
                "image_hash": image_hash.encode("hex")
            }
        }
        self.ws.push(json.dumps(sanitize_html(payload), indent=4))

    def push_ws(self, json_obj):
        """Push an arbitrary (sanitized) JSON object straight to the websocket."""
        self.ws.push(json.dumps(sanitize_html(json_obj), indent=4))
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "market/listeners.py",
"copies": "1",
"size": "4648",
"license": "mit",
"hash": 2275380410536130600,
"line_mean": 39.4173913043,
"line_max": 114,
"alpha_frac": 0.5432444062,
"autogenerated": false,
"ratio": 4.56581532416503,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5609059730365029,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import math
import numpy as np
import mathutils
def calculate_feature_vector_sequence(traj, args, delayed_strokes=None):
    """
    Compute the per-point feature matrix of trajectory *traj*.

    Valid feature names in *args* are "dir", "curv", "penup", "hat",
    "vic_aspect", "vic_curl", "vic_line", "vic_slope" and "bitmap".
    Note that calculating the "hat" feature requires precalculated
    delayed strokes.
    """
    vectors = [__calculate_feature_vector(traj, idx, args, delayed_strokes)
               for idx in range(len(traj))]
    return np.array(vectors)
def __calculate_feature_vector(traj, point_index, args, delayed_strokes=None):
    """
    Feature vector for the point at *point_index*; see
    calculate_feature_vector_sequence for the valid feature names.
    Trajectories shorter than 5 points yield an all-zero vector.
    """
    # "dir" and "curv" each contribute two values and "bitmap" contributes
    # nine, so they add extra slots beyond the one counted per args entry.
    total_dims = len(args)
    if "dir" in args:
        total_dims += 1
    if "curv" in args:
        total_dims += 1
    if "bitmap" in args:
        total_dims += 8
    if len(traj) < 5:
        return np.zeros(total_dims)
    features = []
    if "dir" in args:
        features.extend(__writing_direction(traj, point_index))
    if "curv" in args:
        features.extend(__curvature(traj, point_index))
    if "penup" in args:
        features.append(__is_penup(traj, point_index))
    if "hat" in args:
        features.append(__hat(traj, point_index, delayed_strokes))
    if "vic_aspect" in args:
        features.append(__vicinity_aspect(traj, point_index))
    if "vic_curl" in args:
        features.append(__vicinity_curliness(traj, point_index))
    if "vic_line" in args:
        features.append(__vicinity_lineness(traj, point_index))
    if "vic_slope" in args:
        features.append(__vicinity_slope(traj, point_index))
    if "bitmap" in args:
        features.extend(__context_bitmap(traj, point_index))
    return np.array(mathutils.normalize(features))
def __writing_direction(traj, point_idx):
    """
    Normalized direction vector at *point_idx*, computed as the difference
    between the neighbouring points (previous minus next, clamped at the
    trajectory ends).  Returns [0.0, 0.0] for a zero-length difference.
    """
    last = len(traj) - 1
    prev_idx = point_idx - 1 if point_idx > 0 else point_idx
    next_idx = point_idx + 1 if point_idx < last else point_idx
    delta = traj[prev_idx, :2] - traj[next_idx, :2]
    length = np.linalg.norm(delta)
    if length == 0:
        return [0.0, 0.0]
    return delta / length
def __curvature(traj, point_idx):
    """
    Curvature at *point_idx* as [cos, sin] of the angle between the writing
    directions of the previous and next point (clamped at the ends).
    """
    last = len(traj) - 1
    prev_idx = point_idx - 1 if point_idx > 0 else point_idx
    next_idx = point_idx + 1 if point_idx < last else point_idx
    cos_p, sin_p = __writing_direction(traj, prev_idx)
    cos_n, sin_n = __writing_direction(traj, next_idx)
    # angle-difference identities: cos(a-b), sin(b-a) built from the two unit vectors
    return [cos_p * cos_n + sin_p * sin_n,
            cos_p * sin_n - sin_p * cos_n]
def __is_penup(traj, point_idx):
    """Pen-up flag (third trajectory column) of the point, as a float."""
    return float(traj[point_idx][2])
def __hat(traj, point_idx, delayed_strokes):
    """
    1.0 if the point lies horizontally within and vertically below
    (smaller y) any delayed stroke, else 0.0.
    """
    if delayed_strokes is None:
        return 0.0
    px = traj[point_idx, 0]
    py = traj[point_idx, 1]
    for stroke in delayed_strokes:
        xs = stroke[:, 0]
        # under the stroke: x inside its horizontal extent, y below its minimum
        if min(xs) <= px <= max(xs) and py < min(stroke[:, 1]):
            return 1.0
    return 0.0
def __vicinity_aspect(traj, point_idx):
    """
    Aspect ratio of the 5-point window around *point_idx*, mapped to
    [-1, 1]; 0.0 when the window is incomplete or degenerate.
    """
    # need two points on each side of point_idx
    if not 2 <= point_idx <= len(traj) - 3:
        return 0.0
    window = traj[point_idx - 2:point_idx + 3, :2]
    dx = max(window[:, 0]) - min(window[:, 0])
    dy = max(window[:, 1]) - min(window[:, 1])
    span = dx + dy
    if span == 0:
        return 0.0
    return 2 * float(dy) / span - 1
def __vicinity_curliness(traj, point_idx):
    """
    Curliness of the 5-point window: arc length divided by the larger
    bounding-box side, minus 2; 0.0 when the window is incomplete or flat.
    """
    if not 2 <= point_idx <= len(traj) - 3:
        return 0.0
    window = traj[point_idx - 2:point_idx + 3, :2]
    # NOTE(review): range(len(window) - 2) sums only the first three of the
    # four segments in the 5-point window -- possibly intentional; preserved.
    arc = sum([np.linalg.norm(window[k] - window[k + 1]) for k in range(len(window) - 2)])
    dx = max(window[:, 0]) - min(window[:, 0])
    dy = max(window[:, 1]) - min(window[:, 1])
    side = max(dx, dy)
    if side == 0:
        return 0.0
    return float(arc) / side - 2
def __vicinity_lineness(traj, point_idx):
    """
    Mean squared distance of the 5-point window to the chord connecting
    its first and last point; 0.0 when the window is incomplete.
    """
    if not 2 <= point_idx <= len(traj) - 3:
        return 0.0
    window = traj[point_idx - 2:point_idx + 3, :2]
    x1, y1 = window[0, 0], window[0, 1]
    x2, y2 = window[-1, 0], window[-1, 1]
    chord = math.sqrt((y2 - y1)**2 + (x2 - x1)**2)
    if chord == 0:
        # degenerate chord (first == last point): mean squared distance to it
        return sum([math.sqrt((y2 - y)**2 + (x2 - x)**2)**2 for [x, y] in window]) / len(window)
    # perpendicular point-to-line distance via the cross-product formula
    dist_to_chord = lambda x, y: abs((y2 - y1) * x - (x2 - x1) * y + x2 * y1 - y2 * x1) / math.sqrt((y2 - y1)**2 + (x2 - x1)**2)
    return sum([dist_to_chord(x, y)**2 for [x, y] in window]) / len(window)
def __vicinity_slope(traj, point_idx):
    """
    Cosine of the slope angle of the line through the first and last point
    of the vicinity window; 0.0 when the window is incomplete.
    """
    if not 2 <= point_idx <= len(traj) - 3:
        return 0.0
    # NOTE(review): this window spans only 4 points (idx-2 .. idx+1), unlike
    # the 5-point windows of the other vicinity features -- preserved as-is.
    window = traj[point_idx - 2:point_idx + 2, :2]
    run = window[-1, 0] - window[0, 0]
    if run != 0:
        slope = (window[-1, 1] - window[0, 1]) / run
    else:
        slope = 0
    return math.cos(math.atan(slope))
def __context_bitmap(traj, point_idx, bin_size=10):
    """
    Normalized 3x3 occupancy histogram of all trajectory points falling
    into a (3*bin_size)-sized window centred on *point_idx*.
    """
    # window origin so that the current point sits in the centre cell
    origin_x = traj[point_idx][0] - 3 * bin_size / 2
    origin_y = traj[point_idx][1] - 3 * bin_size / 2
    grid = [[0] * 3 for _ in range(3)]
    total = 0
    for p in traj:
        col = int((p[0] - origin_x) / bin_size)
        row = int((p[1] - origin_y) / bin_size)
        if 0 <= col <= 2 and 0 <= row <= 2:
            grid[row][col] += 1
            total += 1
    # flatten row-major and normalize by the number of counted points
    return mathutils.normalize(np.array([cell / float(total) for row in grid for cell in row]))
| {
"repo_name": "cwiep/online-handwriting-tools",
"path": "traj/trajfeat.py",
"copies": "1",
"size": "6475",
"license": "apache-2.0",
"hash": -5738362040503241000,
"line_mean": 36.6453488372,
"line_max": 127,
"alpha_frac": 0.593976834,
"autogenerated": false,
"ratio": 3.01022780102278,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.410420463502278,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import nacl.signing
import nacl.utils
import nacl.encoding
import nacl.hash
from nacl.public import PrivateKey, PublicKey, Box
from zope.interface import implements
from rpcudp import RPCProtocol
from interfaces import MessageProcessor
from log import Logger
from protos.message import GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, \
GET_USER_METADATA, FOLLOW, UNFOLLOW, \
GET_FOLLOWERS, GET_FOLLOWING, NOTIFY, GET_CONTRACT_METADATA, MESSAGE
from db.datastore import HashMap, ListingsStore, FollowData
from market.profile import Profile
from protos.objects import Metadata, Listings, Followers, Plaintext_Message
from binascii import hexlify
from zope.interface.verify import verifyObject
from zope.interface.exceptions import DoesNotImplement
from interfaces import NotificationListener, MessageListener
class MarketProtocol(RPCProtocol):
    """
    RPC handlers for the market layer: contracts, images, profiles,
    listings, follow relationships, notifications and direct messages.

    Incoming requests arrive on the ``rpc_*`` handlers; the matching
    ``call*`` wrappers issue the same RPCs to remote peers.  Handlers
    return lists of strings; the literal strings "None"/"False" signal
    failure to the remote caller.
    """
    implements(MessageProcessor)

    def __init__(self, node_proto, router, signing_key):
        """
        :param node_proto: protobuf describing this node (handed to RPCProtocol;
            presumably exposed as ``self.proto`` by the base class -- see rpc_follow)
        :param router: routing table; contacts are added on every valid interaction
        :param signing_key: nacl signing key used to sign responses
        """
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.hashmap = HashMap()
        self.signing_key = signing_key
        self.listeners = []
        # message types this processor answers; exposed via __iter__
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 NOTIFY, MESSAGE]

    def connect_multiplexer(self, multiplexer):
        # late binding: the multiplexer is attached after construction
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        # listeners receive NOTIFY broadcasts and direct messages
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        """Return [serialized contract] for *contract_hash*, or ["None"]."""
        self.log.info("Looking up contract ID %s" % contract_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            # hashmap maps content hashes to file paths on disk
            with open(self.hashmap.get_file(contract_hash), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("Could not find contract %s" % contract_hash.encode('hex'))
            return ["None"]

    def rpc_get_image(self, sender, image_hash):
        """Return [raw image bytes] for *image_hash*, or ["None"]."""
        self.log.info("Looking up image with hash %s" % image_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(image_hash), "r") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("Could not find image %s" % image_hash.encode('hex'))
            return ["None"]

    def rpc_get_profile(self, sender):
        """Return [serialized profile, 64-byte detached signature], or ["None"]."""
        self.log.info("Fetching profile")
        self.router.addContact(sender)
        try:
            proto = Profile().get(True)
            # nacl sign() prepends the 64-byte signature to the message
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("Unable to load the profile")
            return ["None"]

    def rpc_get_user_metadata(self, sender):
        """Return [serialized Metadata, signature] built from the profile, or ["None"]."""
        self.log.info("Fetching metadata")
        self.router.addContact(sender)
        try:
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("Unable to get the profile metadata")
            return ["None"]

    def rpc_get_listings(self, sender):
        """Return [serialized Listings, signature], or ["None"] if none stored."""
        self.log.info("Fetching listings")
        self.router.addContact(sender)
        try:
            p = Profile().get()
            l = Listings()
            l.ParseFromString(ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Could not find any listings in the database")
            return ["None"]

    def rpc_get_contract_metadata(self, sender, contract_hash):
        """
        Return [serialized listing, signature] for the listing whose
        contract_hash matches, or ["None"] on error.

        NOTE(review): if no listing matches and nothing raises, this falls
        off the end and returns None (not ["None"]) -- confirm callers
        tolerate that.
        """
        self.log.info("Fetching metadata for contract %s" % hexlify(contract_hash))
        self.router.addContact(sender)
        try:
            proto = ListingsStore().get_proto()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    ser = listing.SerializeToString()
                    return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("Could not find metadata for contract %s" % hexlify(contract_hash))
            return ["None"]

    def rpc_follow(self, sender, proto, signature):
        """
        Validate and store a signed Follower record from *sender*; on
        success return ["True", our metadata, signature], else ["False"].
        """
        self.log.info("Follow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            # signed_pubkey carries signature (first 64 bytes) + verify key
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            # the record must be about the sender and about us
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.proto.guid:
                raise Exception('Following wrong node')
            f.signature = signature
            FollowData().set_follower(f)
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        """Remove *sender* from our followers if the unfollow message verifies."""
        self.log.info("Unfollow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            # the signed payload is the literal string "unfollow:<our guid>"
            verify_key.verify("unfollow:" + self.proto.guid, signature)
            f = FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("Failed to validate follower signature")
            return ["False"]

    def rpc_get_followers(self, sender):
        """Return [serialized followers, signature], or ["None"] if empty."""
        self.log.info("Fetching followers list from db")
        self.router.addContact(sender)
        ser = FollowData().get_followers()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        """Return [serialized following list, signature], or ["None"] if empty."""
        self.log.info("Fetching following list from db")
        self.router.addContact(sender)
        ser = FollowData().get_following()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_notify(self, sender, message, signature):
        """
        Accept a broadcast of at most 140 characters, but only from nodes
        we follow and only when the signature verifies; fan it out to all
        NotificationListener listeners.
        """
        if len(message) <= 140 and FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                return ["False"]
            self.log.info("Received a notification from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    # only forward to listeners implementing NotificationListener
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        """
        Decrypt and verify a direct message, then hand the plaintext to all
        MessageListener listeners.  Returns ["True"]/["False"].
        """
        try:
            # decrypt with our key and the sender's ephemeral public key
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = Plaintext_Message()
            p.ParseFromString(plaintext)
            # signature is over the message with its signature field cleared
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            # guid must be the first 40 hex chars of sha512(signed_pubkey)
            # and bytes 64:128 must satisfy the proof-of-work threshold
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("Received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.error("Received invalid message from %s" % sender)
            return ["False"]

    # ------------------------------------------------------------------
    # Outgoing RPC wrappers.  The self.get_contract / self.follow / ...
    # attributes are presumably generated dynamically by RPCProtocol from
    # the rpc_* handler names -- they are not defined in this class.
    # ------------------------------------------------------------------

    def callGetContract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_image(address, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_profile(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_user_metadata(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_listings(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract_metadata(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.follow(address, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.unfollow(address, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_followers(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_following(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callNotify(self, nodeToAsk, message, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.notify(address, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    # NOTE(review): parameter name "ehemeral_pubkey" is a typo for
    # "ephemeral_pubkey"; left unchanged in case callers pass it by keyword.
    def callMessage(self, nodeToAsk, ehemeral_pubkey, ciphertext):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.message(address, ehemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        # iterating the processor yields the command types it handles
        return iter(self.handled_commands)
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "market/protocol.py",
"copies": "2",
"size": "12165",
"license": "mit",
"hash": -8328205290075180000,
"line_mean": 40.097972973,
"line_max": 109,
"alpha_frac": 0.6143033292,
"autogenerated": false,
"ratio": 4.0428713858424725,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0008582694288997508,
"num_lines": 296
} |
__author__ = 'chris'
import numpy as np
import os
import matplotlib.image as mimg
import mathutils
class SpatialPyramid():
    """
    Spatial-pyramid pooling of quantized feature labels: for each pyramid
    level the area is split into a grid of bins, a label histogram of
    length num_centroids is built per bin, and all histograms are
    flattened into one descriptor.
    """
    def __init__(self, levels, num_centroids):
        """
        Creates Spatial Pyramid object for given parameters.
        :param levels: list of bin configurations. examples: [ [2, 1] ] for one level and two bins splitting
        the trajectory in left and right half. [ [2, 1], [1, 1] ] same as above but adding a second level without splitting.
        :param num_centroids: codebook size, i.e. histogram length per spatial bin.
        """
        self.levels = levels
        self.num_centroids = num_centroids

    def num_total_bins(self):
        # total number of spatial bins over all pyramid levels
        return sum([level[0]*level[1] for level in self.levels])

    def descriptor_size(self):
        # one num_centroids-length histogram per bin
        return self.num_total_bins() * self.num_centroids

    def calculate_descriptor_from_mat(self, desc_mat):
        """
        Calculates descriptors for given matrix of quantized vector labels by applying
        a spatial pyramid scheme and flattening and normalizing the resulting array.
        :return: One-dimensional array representing descriptor of the given matrix.
        """
        spatial_pyramid = self.__extract_spatial_pyramid_from_mat(desc_mat)
        return self.__pyramid_to_descriptor(spatial_pyramid)

    def __extract_spatial_pyramid_from_mat(self, desc_mat):
        """
        Applies a spatial pyramid scheme to a given matrix of quantized vector labels.
        :return: An array of arrays containing each bin for each level of the spatial pyramid.
        """
        spatial_pyramid = []
        for level in self.levels:
            num_bins_x, num_bins_y = level
            # dtype=object because each cell holds a whole histogram array
            bins = np.zeros(shape=(num_bins_y, num_bins_x), dtype=object)
            # NOTE(review): integer division here (Python 2 semantics) -- on
            # Python 3 this would produce float bin sizes and break slicing.
            bin_size_x = desc_mat.shape[1]/num_bins_x
            bin_size_y = desc_mat.shape[0]/num_bins_y
            # log.d("Binsize on level {}: {}, {}".format(level, bin_size_x, bin_size_y))
            for x in range(num_bins_x):
                for y in range(num_bins_y):
                    # log.d("level {} {}:".format(x, y))
                    # If it's the last bin in x- or y-direction we use all the remaining
                    # points and ignore bin size.
                    xmin = x * bin_size_x
                    # NOTE(review): shape-1 as the (exclusive) slice bound drops
                    # the very last column/row from the last bin -- verify intent.
                    xmax = desc_mat.shape[1]-1 if x == num_bins_x - 1 else (x+1)*bin_size_x
                    ymin = y * bin_size_y
                    ymax = desc_mat.shape[0]-1 if y == num_bins_y - 1 else (y+1)*bin_size_y
                    indices = np.ravel(desc_mat[ymin:ymax, xmin:xmax])
                    hist = np.bincount(indices, minlength=self.num_centroids)
                    bins[y, x] = hist
            spatial_pyramid.append(bins)
        return np.array(spatial_pyramid)

    def calculate_descriptor(self, keypoints, labels, origin, width, height):
        """
        Calculates descriptor for a given list of keypoints and quantized vector labels by applying
        a spatial pyramid scheme and flattening and normalizing the resulting array.
        This is slower than calculate_descriptor_from_mat!
        :param keypoints: Keypoints (with format [x,y]) for which labels are given.
        :param labels: labels[i] contains the quantized vector label of the i-th keypoint.
        :param origin: Origin of the area that the spatial pyramid is calculated for (e.g. (0,0) for whole image).
        :param width: Width of the area the spatial pyramid is calculated for.
        :param height: Height of the area the spatial pyramid is calculated for.
        :return: An array of arrays containing each bin for each level of the spatial pyramid.
        """
        spatial_pyramid = self.__extract_spatial_pyramid(keypoints, labels, origin, width, height)
        return self.__pyramid_to_descriptor(spatial_pyramid)

    def __extract_spatial_pyramid(self, keypoints, labels, origin, width, height):
        """
        Applies a spatial pyramid scheme to a given set of keypoints/quantized vector labels.
        """
        levels = len(self.levels)
        spatial_pyramid = []
        hist_template = [0] * self.num_centroids
        for level in range(levels):
            num_bins_x = self.levels[level][0]
            num_bins_y = self.levels[level][1]
            # bins[x][y] here, whereas the matrix variant indexes bins[y, x];
            # NOTE(review): the two code paths therefore flatten bins in a
            # different order -- descriptors are not interchangeable; verify.
            bins = [[list(hist_template) for _ in range(num_bins_y)] for _ in range(num_bins_x)]
            bin_size_x, bin_size_y = float(width)/num_bins_x, float(height)/num_bins_y
            # log.d("Binsize on level {}: {}, {}".format(level, bin_size_x, bin_size_y))
            for index, point in enumerate(keypoints):
                # bin_size + 1: preventing index error, for keypoints situated directly on an edge
                # of a bin: keypoint (82, 176), bin_sizes (405, 176) would yield ybin-index 1 (of total 1)
                xbin = int((point[0]-origin[0]) / (bin_size_x+1))
                ybin = int((point[1]-origin[1]) / (bin_size_y+1))
                bins[xbin][ybin][labels[index]] += 1
            spatial_pyramid.append(bins)
        return np.array(spatial_pyramid)

    def __pyramid_to_descriptor(self, descriptor_pyramid):
        """
        Calculates descriptor by flattening the spatial bins in descriptor_pyramid.
        :return: One-dimensional array representing descriptor of a word-snippet.
        """
        flattened = []
        for index, level in enumerate(descriptor_pyramid):
            tmp = []
            for horiz_bin in level:
                for vert_bin in horiz_bin:
                    # each per-bin histogram is normalized individually
                    tmp.extend(mathutils.normalize(vert_bin))
                    # tmp.extend(vert_bin)
            flattened.extend(tmp)
        # return np.array(mathutils.normalize(flattened))
        # final power-normalization + L2-normalization over the whole vector
        return mathutils.normalize(np.array(mathutils.power_normalize(flattened)))
        # return np.array(flattened)
# return np.array(flattened)
| {
"repo_name": "cwiep/online-handwriting-tools",
"path": "traj/spatialpyramid.py",
"copies": "1",
"size": "5735",
"license": "apache-2.0",
"hash": -750732515675584600,
"line_mean": 47.1932773109,
"line_max": 124,
"alpha_frac": 0.6074978204,
"autogenerated": false,
"ratio": 3.906675749318801,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5014173569718801,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import numpy as np
class BoofGenerator():
    """
    Turns lists of keypoints and their quantized cluster labels into
    bag-of-features descriptor vectors using a spatial pyramid.
    """
    def __init__(self, spatial_pyramid):
        # object providing descriptor_size() and calculate_descriptor(...)
        self.spatial_pyramid = spatial_pyramid

    def build_feature_vectors_matrix(self, keypoints, labels):
        """
        Build one descriptor row per (keypoints, labels) example.

        :param keypoints: sequence of keypoint arrays (columns x, y), one per example
        :param labels: sequence of label sequences, aligned with keypoints
        :return: numpy matrix with one descriptor per row
        """
        print("Building trajectory feature matrix...")
        num_features = self.spatial_pyramid.descriptor_size()
        num_examples = len(keypoints)
        feat_mat = np.zeros(shape=(num_examples, num_features))
        # BUG FIX: the original called log.update_progress(...) here, but no
        # "log" module is imported in this file, raising NameError on the
        # first iteration; the progress call was removed.
        for i, (keyp, lab) in enumerate(zip(keypoints, labels)):
            feat_mat[i] = self.build_feature_vector(keyp, lab)
        print("Accumulated {} feature vectors.".format(len(feat_mat)))
        return np.array(feat_mat)

    def build_feature_vector(self, keypoints, labels):
        """
        Descriptor for a single example: spatial pyramid over the
        bounding box of its keypoints.
        """
        minx = min(keypoints[:, 0])
        maxx = max(keypoints[:, 0])
        miny = min(keypoints[:, 1])
        maxy = max(keypoints[:, 1])
        return self.spatial_pyramid.calculate_descriptor(keypoints, labels, (minx, miny),
                                                         maxx-minx, maxy-miny)
| {
"repo_name": "cwiep/online-handwriting-tools",
"path": "traj/boof.py",
"copies": "1",
"size": "1068",
"license": "apache-2.0",
"hash": 5795927993510548000,
"line_mean": 35.8275862069,
"line_max": 111,
"alpha_frac": 0.611423221,
"autogenerated": false,
"ratio": 3.548172757475083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4659595978475083,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import numpy as np
def read_trajectory_from_file(file):
    """
    Reads points of an online trajectory from a textfile where
    each line is formatted as "x y penup". All entries have to be integers.
    penup is 0/1 depending on the state of the pen. An optional annotation of
    the presented word can be given on the first line
    :param file: Textfile containing points of a trajectory.
    :return: Numpy array with columns x, y and penup, annotation or None
    """
    annotation = None
    points = []
    with open(file, "r") as traj_file:
        for raw_line in traj_file:
            fields = raw_line.split(" ")
            if len(fields) == 3:
                points.append([int(field.strip()) for field in fields])
            else:
                # any non-3-field line is treated as the word annotation
                annotation = raw_line.strip()
    return np.array(points), annotation
def read_traj_clusters(filename):
    """
    Parses a textfile of online clusters, where each line contains
    the space-separated values of one cluster-center.
    """
    with open(filename, "r") as infile:
        centers = [[float(token) for token in line.split(" ")] for line in infile]
    return np.array(centers)
def read_traj_keypoints(filename):
    """Read integer keypoints (one space-separated point per line) as a numpy array."""
    with open(filename, "r") as keypoint_file:
        keypoints = [[int(token.strip()) for token in line.split(" ")]
                     for line in keypoint_file]
    return np.array(keypoints)
def read_traj_labels(filename):
    """Read one integer label per line into a numpy array."""
    with open(filename, "r") as label_file:
        return np.array([int(line.strip()) for line in label_file])
"repo_name": "cwiep/online-handwriting-tools",
"path": "traj/trajimport.py",
"copies": "1",
"size": "1674",
"license": "apache-2.0",
"hash": -3616066552995283000,
"line_mean": 30.6037735849,
"line_max": 77,
"alpha_frac": 0.6254480287,
"autogenerated": false,
"ratio": 3.8306636155606406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9951018607507867,
"avg_score": 0.001018607350554904,
"num_lines": 53
} |
__author__ = 'chris'
import os
import sqlite3 as lite
from api.utils import sanitize_html
from collections import Counter
from config import DATA_FOLDER
from dht.node import Node
from dht.utils import digest
from protos import objects
from protos.objects import Listings, Followers, Following
from os.path import join
from db.migrations import migration1, migration2, migration3, migration4, migration5
class Database(object):
__slots__ = ['PATH', 'filemap', 'profile', 'listings', 'keys', 'follow', 'messages',
'notifications', 'broadcasts', 'vendors', 'moderators', 'purchases', 'sales',
'cases', 'ratings', 'transactions', 'settings']
def __init__(self, testnet=False, filepath=None):
object.__setattr__(self, 'PATH', self._database_path(testnet, filepath))
object.__setattr__(self, 'filemap', HashMap(self.PATH))
object.__setattr__(self, 'profile', ProfileStore(self.PATH))
object.__setattr__(self, 'listings', ListingsStore(self.PATH))
object.__setattr__(self, 'keys', KeyStore(self.PATH))
object.__setattr__(self, 'follow', FollowData(self.PATH))
object.__setattr__(self, 'messages', MessageStore(self.PATH))
object.__setattr__(self, 'notifications', NotificationStore(self.PATH))
object.__setattr__(self, 'broadcasts', BroadcastStore(self.PATH))
object.__setattr__(self, 'vendors', VendorStore(self.PATH))
object.__setattr__(self, 'moderators', ModeratorStore(self.PATH))
object.__setattr__(self, 'purchases', Purchases(self.PATH))
object.__setattr__(self, 'sales', Sales(self.PATH))
object.__setattr__(self, 'cases', Cases(self.PATH))
object.__setattr__(self, 'ratings', Ratings(self.PATH))
object.__setattr__(self, 'transactions', Transactions(self.PATH))
object.__setattr__(self, 'settings', Settings(self.PATH))
self._initialize_datafolder_tree()
self._initialize_database(self.PATH)
def get_database_path(self):
return self.PATH
def _initialize_database(self, database_path):
"""
Create database, if not present, and clear cache.
"""
if not database_path:
raise RuntimeError('attempted to initialize empty path')
if not os.path.isfile(database_path):
self._create_database(database_path)
cache = join(DATA_FOLDER, "cache.pickle")
if os.path.exists(cache):
os.remove(cache)
self._run_migrations()
@staticmethod
def _database_path(testnet, filepath):
'''
Get database pathname.
Args:
testnet: Boolean
filename: If provided, overrides testnet
'''
path = ''
if filepath:
path = filepath
elif testnet:
path = join(DATA_FOLDER, "OB-Testnet.db")
else:
path = join(DATA_FOLDER, "OB-Mainnet.db")
return path
@staticmethod
def connect_database(path):
conn = lite.connect(path)
conn.text_factory = str
return conn
@staticmethod
def _initialize_datafolder_tree():
"""
Creates, if not present, directory tree in DATA_FOLDER.
"""
tree = [
['cache'],
['store', 'contracts', 'listings'],
['store', 'contracts', 'in progress'],
['store', 'contracts', 'unfunded'],
['store', 'contracts', 'trade receipts'],
['store', 'media'],
['purchases', 'in progress'],
['purchases', 'unfunded'],
['purchases', 'trade receipts'],
['cases']
]
path = ''
for sub_tree in tree:
path = DATA_FOLDER
for directory in sub_tree:
path = join(path, directory)
if not os.path.exists(path):
os.makedirs(path, mode=0o750)
@staticmethod
def _create_database(database_path):
conn = lite.connect(database_path)
cursor = conn.cursor()
cursor.execute('''PRAGMA user_version = 5''')
cursor.execute('''CREATE TABLE hashmap(hash TEXT PRIMARY KEY, filepath TEXT)''')
cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB, tempHandle TEXT)''')
cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
cursor.execute('''CREATE TABLE keys(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
cursor.execute('''CREATE TABLE followers(guid TEXT UNIQUE, serializedFollower TEXT)''')
cursor.execute('''CREATE INDEX index_followers ON followers(serializedFollower);''')
cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
cursor.execute('''CREATE TABLE messages(msgID TEXT PRIMARY KEY, guid TEXT, handle TEXT, pubkey BLOB,
subject TEXT, messageType TEXT, message TEXT, timestamp INTEGER, avatarHash BLOB, signature BLOB,
outgoing INTEGER, read INTEGER)''')
cursor.execute('''CREATE INDEX index_guid ON messages(guid);''')
cursor.execute('''CREATE INDEX index_subject ON messages(subject);''')
cursor.execute('''CREATE INDEX index_messages_read ON messages(read);''')
cursor.execute('''CREATE INDEX index_timestamp ON messages(timestamp);''')
cursor.execute('''CREATE TABLE notifications(notifID TEXT UNIQUE, guid BLOB, handle TEXT, type TEXT,
orderId TEXT, title TEXT, timestamp INTEGER, imageHash BLOB, read INTEGER)''')
cursor.execute('''CREATE INDEX index_notification_read ON notifications(read);''')
cursor.execute('''CREATE TABLE broadcasts(id TEXT PRIMARY KEY, guid BLOB, handle TEXT, message TEXT,
timestamp INTEGER, avatarHash BLOB)''')
cursor.execute('''CREATE TABLE vendors(guid TEXT PRIMARY KEY, serializedNode BLOB)''')
cursor.execute('''CREATE TABLE moderators(guid TEXT PRIMARY KEY, pubkey BLOB, bitcoinKey BLOB,
bitcoinSignature BLOB, handle TEXT, name TEXT, description TEXT, avatar BLOB, fee FLOAT)''')
cursor.execute('''CREATE TABLE purchases(id TEXT PRIMARY KEY, title TEXT, description TEXT,
timestamp INTEGER, btc FLOAT, address TEXT, status INTEGER, outpoint BLOB, thumbnail BLOB, vendor TEXT,
proofSig BLOB, contractType TEXT, unread INTEGER)''')
cursor.execute('''CREATE TABLE sales(id TEXT PRIMARY KEY, title TEXT, description TEXT,
timestamp INTEGER, btc REAL, address TEXT, status INTEGER, thumbnail BLOB, outpoint BLOB, buyer TEXT,
paymentTX TEXT, contractType TEXT, unread INTEGER)''')
cursor.execute('''CREATE TABLE cases(id TEXT PRIMARY KEY, title TEXT, timestamp INTEGER, orderDate TEXT,
btc REAL, thumbnail BLOB, buyer TEXT, vendor TEXT, validation TEXT, claim TEXT, status INTEGER,
unread INTEGER)''')
cursor.execute('''CREATE TABLE ratings(listing TEXT, ratingID TEXT, rating TEXT)''')
cursor.execute('''CREATE INDEX index_listing ON ratings(listing);''')
cursor.execute('''CREATE INDEX index_rating_id ON ratings(ratingID);''')
cursor.execute('''CREATE TABLE transactions(tx BLOB);''')
cursor.execute('''CREATE TABLE settings(id INTEGER PRIMARY KEY, refundAddress TEXT, currencyCode TEXT,
country TEXT, language TEXT, timeZone TEXT, notifications INTEGER, shippingAddresses BLOB, blocked BLOB,
termsConditions TEXT, refundPolicy TEXT, moderatorList BLOB, username TEXT, password TEXT,
smtpNotifications INTEGER, smtpServer TEXT, smtpSender TEXT, smtpRecipient TEXT, smtpUsername TEXT,
smtpPassword TEXT)''')
conn.commit()
conn.close()
def _run_migrations(self):
    """
    Bring the sqlite schema up to date.

    Reads the schema version from ``PRAGMA user_version`` and applies,
    in order, every migration newer than that version. Each migration
    module is responsible for bumping user_version itself.
    """
    conn = self.connect_database(self.PATH)
    cursor = conn.cursor()
    cursor.execute('''PRAGMA user_version''')
    version = cursor.fetchone()[0]
    conn.close()
    # Ordered list of schema migrations; index == the user_version that
    # the migration upgrades FROM.
    migrations = [migration1, migration2, migration3, migration4, migration5]
    if 0 <= version < len(migrations):
        for migration in migrations[version:]:
            migration.migrate(self.PATH)
class HashMap(object):
    """
    Maps file hashes (which are sent over the wire in a query) to a more
    human readable filename in local storage. This is useful for users who
    want to look through their store data on disk.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def insert(self, hash_value, filepath):
        """Create or replace the filename mapping for a hash."""
        db = Database.connect_database(self.PATH)
        with db:
            cur = db.cursor()
            cur.execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
                      VALUES (?,?)''', (hash_value, filepath))
            db.commit()
        db.close()

    def get_file(self, hash_value):
        """Return the absolute path for a hash, or None if unknown."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
        row = cur.fetchone()
        db.close()
        return None if row is None else DATA_FOLDER + row[0]

    def get_all(self):
        """Return every (hash, filepath) row in the table."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT * FROM hashmap ''')
        rows = cur.fetchall()
        db.close()
        return rows

    def delete(self, hash_value):
        """Remove the mapping for a single hash."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
            db.commit()
        db.close()

    def delete_all(self):
        """Wipe the entire hashmap table."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM hashmap''')
            db.commit()
        db.close()
class ProfileStore(object):
    """
    Stores the user's profile data in the db as a serialized Profile
    protobuf object. Protobuf is used because it is flexible (it allows
    custom repeated fields such as SocialAccount) and because the blob is
    served over the wire as-is, so it never has to be rebuilt on startup.
    Interact with the profile through `market.profile`, not this class.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def set_proto(self, proto):
        """Persist the serialized profile, preserving the temp handle."""
        db = Database.connect_database(self.PATH)
        with db:
            cur = db.cursor()
            temp_handle = self.get_temp_handle()
            cur.execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo, tempHandle)
                      VALUES (?,?,?)''', (1, proto, temp_handle))
            db.commit()
        db.close()

    def get_proto(self):
        """Return the serialized profile blob, or None if never stored."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
        row = cur.fetchone()
        db.close()
        return None if row is None else row[0]

    def set_temp_handle(self, handle):
        """Store a temporary handle, creating the profile row if needed."""
        db = Database.connect_database(self.PATH)
        with db:
            cur = db.cursor()
            if self.get_proto() is None:
                # No profile row yet: insert one carrying only the handle.
                cur.execute('''INSERT OR REPLACE INTO profile(id, tempHandle)
                      VALUES (?,?)''', (1, handle))
            else:
                cur.execute('''UPDATE profile SET tempHandle=? WHERE id=?;''', (handle, 1))
            db.commit()
        db.close()

    def get_temp_handle(self):
        """Return the temporary handle, or the empty string if unset."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT tempHandle FROM profile WHERE id = 1''')
        row = cur.fetchone()
        db.close()
        return "" if row is None else row[0]
class ListingsStore(object):
    """
    Stores a serialized `Listings` protobuf object. It contains metadata for
    all the contracts hosted by this store. We send this in response to a
    GET_LISTING query. It should be updated each time a new contract is
    created.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def add_listing(self, proto):
        """
        Append a listing to the saved Listings proto. Will also update an
        existing listing if the contract hash is the same.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            l = Listings()
            ser = self.get_proto()
            if ser is not None:
                l.ParseFromString(ser)
                # Iterate a copy: removing from a repeated field while
                # iterating it directly can skip entries.
                for listing in list(l.listing):
                    if listing.contract_hash == proto.contract_hash:
                        l.listing.remove(listing)
            l.listing.extend([proto])
            cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                      VALUES (?,?)''', (1, l.SerializeToString()))
            conn.commit()
        conn.close()

    def delete_listing(self, hash_value):
        """Remove the listing with the given contract hash, if present."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            ser = self.get_proto()
            if ser is not None:
                l = Listings()
                l.ParseFromString(ser)
                for listing in list(l.listing):
                    if listing.contract_hash == hash_value:
                        l.listing.remove(listing)
                cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
                          VALUES (?,?)''', (1, l.SerializeToString()))
                conn.commit()
        # Always close; previously the early return on an empty store leaked
        # the connection.
        conn.close()

    def delete_all_listings(self):
        """Drop every stored listing."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM listings''')
            conn.commit()
        conn.close()

    def get_proto(self):
        """Return the serialized Listings object, or None if none saved."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
        ret = cursor.fetchone()
        conn.close()
        return None if ret is None else ret[0]
class KeyStore(object):
    """
    Stores the keys for this node.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def set_key(self, key_type, privkey, pubkey):
        """Insert or replace the key pair stored under key_type."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT OR REPLACE INTO keys(type, privkey, pubkey)
                      VALUES (?,?,?)''', (key_type, privkey, pubkey))
            db.commit()
        db.close()

    def get_key(self, key_type):
        """Return a (privkey, pubkey) tuple, or None if not stored."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT privkey, pubkey FROM keys WHERE type=?''', (key_type,))
        row = cur.fetchone()
        db.close()
        return row if row else None

    def delete_all_keys(self):
        """Remove every stored key pair."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM keys''')
            db.commit()
        db.close()
class FollowData(object):
    """
    A class for saving and retrieving follower and following data
    for this node.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def follow(self, proto):
        """Add (or refresh) a user in the serialized Following list."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            f = Following()
            ser = self.get_following()
            if ser is not None:
                f.ParseFromString(ser)
                # Iterate a copy: removing from a repeated field while
                # iterating it directly can skip entries.
                for user in list(f.users):
                    if user.guid == proto.guid:
                        f.users.remove(user)
            f.users.extend([proto])
            cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                           (1, f.SerializeToString()))
            conn.commit()
        conn.close()

    def unfollow(self, guid):
        """Remove a user from the serialized Following list."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            f = Following()
            ser = self.get_following()
            if ser is not None:
                f.ParseFromString(ser)
                for user in list(f.users):
                    if user.guid == guid:
                        f.users.remove(user)
            cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                           (1, f.SerializeToString()))
            conn.commit()
        conn.close()

    def get_following(self):
        """Return the serialized Following blob, or None if empty."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        conn.close()
        if not ret:
            return None
        return ret[0][0]

    def is_following(self, guid):
        """Return True if guid is in our Following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Store a serialized Follower proto keyed by its hex guid."""
        conn = Database.connect_database(self.PATH)
        p = Followers.Follower()
        p.ParseFromString(proto)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''INSERT OR REPLACE INTO followers(guid, serializedFollower) VALUES (?,?)''',
                           (p.guid.encode("hex"), proto.encode("hex")))
            conn.commit()
        conn.close()

    def delete_follower(self, guid):
        """Remove a follower by guid."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM followers WHERE guid=?''', (guid.encode("hex"), ))
            conn.commit()
        conn.close()

    def get_followers(self, start=0):
        """
        Return (serialized Followers proto, total follower count), paging 30
        followers at a time starting `start` rows from the newest.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT Count(*) FROM followers''')
        count = cursor.fetchone()[0]
        f = Followers()
        if count > 0:
            # Bind the offset instead of concatenating it into the SQL.
            cursor.execute('''select serializedFollower from followers order by rowid desc limit 30 offset ?''',
                           (int(start),))
            serialized_followers = cursor.fetchall()
            for proto in serialized_followers:
                p = Followers.Follower()
                p.ParseFromString(proto[0].decode("hex"))
                f.followers.extend([p])
        # Always close; previously the connection leaked when count == 0.
        conn.close()
        return (f.SerializeToString(), count)
class MessageStore(object):
    """
    Stores all of the chat messages for this node and allows retrieval of
    messages and conversations as well as marking as read.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_message(self, guid, handle, pubkey, subject, message_type, message,
                     timestamp, avatar_hash, signature, is_outgoing, msg_id=None):
        """
        Store message in database.

        Returns:
            True on success, False if the insert failed (e.g. duplicate msgID).
        """
        try:
            conn = Database.connect_database(self.PATH)
            with conn:
                outgoing = 1 if is_outgoing else 0
                # Derive a deterministic id from the content when none given.
                msgID = digest(message + str(timestamp)).encode("hex") if msg_id is None else msg_id
                cursor = conn.cursor()
                cursor.execute('''INSERT INTO messages(msgID, guid, handle, pubkey, subject,
                          messageType, message, timestamp, avatarHash, signature, outgoing, read)
                          VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                               (msgID, guid, handle, pubkey, subject, message_type,
                                message, timestamp, avatar_hash, signature, outgoing, 0))
                conn.commit()
            conn.close()
            return True
        except Exception:
            return False

    def get_messages(self, guid, message_type, msgID=None, limit=20):
        """
        Return up to `limit` messages matching guid and message_type, newest
        first, strictly older than the message with id `msgID` (if given).
        """
        if msgID is None:
            timestamp = 4294967295
        else:
            timestamp = self.get_timestamp(msgID)
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        # The limit parameter was previously ignored (hard-coded LIMIT 20).
        cursor.execute('''SELECT guid, handle, pubkey, subject, messageType, message,
                  timestamp, avatarHash, signature, outgoing, read, msgID FROM messages
                  WHERE guid=? AND messageType=? AND timestamp<? ORDER BY timestamp DESC LIMIT ?''',
                       (guid, message_type, timestamp, limit))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_order_messages(self, order_id):
        """
        Return all messages whose subject is the given order id.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT guid, handle, pubkey, subject, messageType, message, timestamp,
                  avatarHash, signature, outgoing, read FROM messages WHERE subject=? ''',
                       (order_id, ))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_conversations(self):
        """
        Get all 'conversations' composed of messages of type 'CHAT'.

        Returns:
            Array of dictionaries, one element for each guid. Dictionaries
            include last message only.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT DISTINCT guid FROM messages''',)
        guids = cursor.fetchall()
        ret = []
        unread = self.get_unread()
        for g in guids:
            cursor.execute('''SELECT avatarHash, message, max(timestamp), pubkey FROM messages
                      WHERE guid=? and messageType=?''', (g[0], "CHAT"))
            val = cursor.fetchone()
            avatar_hash = None
            handle = ""
            if val[0] is not None:
                try:
                    # Prefer the cached profile for handle/avatar.
                    with open(join(DATA_FOLDER, 'cache', g[0] + ".profile"), "r") as filename:
                        profile = filename.read()
                    p = objects.Profile()
                    p.ParseFromString(profile)
                    avatar_hash = p.avatar_hash.encode("hex")
                    handle = p.handle
                except Exception:
                    # Fall back to the last avatar hash seen in a chat message.
                    cursor.execute('''SELECT avatarHash FROM messages
                              WHERE guid=? and messageType=? and avatarHash NOT NULL''', (g[0], "CHAT"))
                    avi = cursor.fetchone()
                    if avi[0] is not None:
                        avatar_hash = avi[0].encode("hex")
            ret.append({"guid": g[0],
                        "avatar_hash": avatar_hash,
                        "handle": handle,
                        "last_message": val[1],
                        "timestamp": val[2],
                        "public_key": val[3].encode("hex"),
                        "unread": 0 if g[0] not in unread else unread[g[0]]})
        conn.close()
        return sanitize_html(ret)

    def get_unread(self):
        """
        Get Counter of guids which have unread, incoming messages.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT guid FROM messages WHERE read=0 and outgoing=0 and subject=""''',)
        ret = []
        guids = cursor.fetchall()
        for g in guids:
            ret.append(g[0])
        conn.close()
        return Counter(ret)

    def get_timestamp(self, msgID):
        """Return the timestamp of the CHAT message with msgID, or 0."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT timestamp FROM messages WHERE msgID=? and messageType=?''', (msgID, "CHAT"))
        row = cursor.fetchone()
        conn.close()
        # Previously fetchone()[0] raised TypeError when the id was unknown.
        if row is None or row[0] is None:
            return 0
        return row[0]

    def mark_as_read(self, guid):
        """
        Mark all messages for guid as read.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE messages SET read=? WHERE guid=?;''', (1, guid))
            conn.commit()
        conn.close()

    def delete_messages(self, guid):
        """
        Delete all messages of type 'CHAT' for guid.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM messages WHERE guid=? AND messageType="CHAT"''', (guid, ))
            conn.commit()
        conn.close()
class NotificationStore(object):
    """
    All notifications are stored here.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_notification(self, notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash):
        """Insert a new, initially-unread notification."""
        db = Database.connect_database(self.PATH)
        with db:
            cur = db.cursor()
            cur.execute('''INSERT INTO notifications(notifID, guid, handle, type, orderId, title, timestamp,
                      imageHash, read) VALUES (?,?,?,?,?,?,?,?,?)''',
                        (notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash, 0))
            db.commit()
        db.close()

    def get_notifications(self, notif_id, limit):
        """Return up to `limit` notifications at or before notif_id's row."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        start = self.get_row(notif_id)
        cur.execute('''SELECT notifID, guid, handle, type, orderId, title, timestamp,
                  imageHash, read FROM notifications WHERE rowid <=? AND rowid > ?''', (start, start - limit))
        rows = cur.fetchall()
        db.close()
        return rows

    def get_row(self, notif_id):
        """Return the rowid for notif_id, or the highest rowid if unknown."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT MAX(rowid) FROM notifications''')
        highest = cur.fetchone()[0]
        if highest is None:
            highest = 0
        cur.execute('''SELECT rowid FROM notifications WHERE notifID=?''', (notif_id, ))
        row = cur.fetchone()
        db.close()
        return row[0] if row else highest

    def mark_as_read(self, notif_id):
        """Flag a single notification as read."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''UPDATE notifications SET read=? WHERE notifID=?;''', (1, notif_id))
            db.commit()
        db.close()

    def get_unread_count(self):
        """Return how many notifications are still unread."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT notifID FROM notifications WHERE read=?''', (0, ))
        rows = cur.fetchall()
        db.close()
        return len(rows)

    def delete_notification(self, notif_id):
        """Remove a notification permanently."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM notifications WHERE notifID=?''', (notif_id,))
            db.commit()
        db.close()
class BroadcastStore(object):
    """
    Stores broadcast messages that our node receives.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_broadcast(self, broadcast_id, guid, handle, message, timestamp, avatar_hash):
        """Persist one received broadcast message."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT INTO broadcasts(id, guid, handle, message, timestamp, avatarHash)
                      VALUES (?,?,?,?,?,?)''', (broadcast_id, guid, handle, message, timestamp, avatar_hash))
            db.commit()
        db.close()

    def get_broadcasts(self):
        """Return every stored broadcast row."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT id, guid, handle, message, timestamp, avatarHash FROM broadcasts''')
        rows = cur.fetchall()
        db.close()
        return rows

    def delete_broadcast(self, broadcast_id):
        """Remove a broadcast by id."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM broadcasts WHERE id=?''', (broadcast_id,))
            db.commit()
        db.close()
class VendorStore(object):
"""
Stores a list of vendors this node has heard about. Useful for
filling out data in the homepage.
"""
def __init__(self, database_path):
self.PATH = database_path
def save_vendor(self, guid, serialized_node):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO vendors(guid, serializedNode)
VALUES (?,?)''', (guid, serialized_node))
except Exception as e:
print e.message
conn.commit()
conn.close()
def get_vendors(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT serializedNode FROM vendors''')
ret = cursor.fetchall()
nodes = {}
for n in ret:
try:
proto = objects.Node()
proto.ParseFromString(n[0])
node = Node(proto.guid,
proto.nodeAddress.ip,
proto.nodeAddress.port,
proto.publicKey,
None if not proto.HasField("relayAddress") else
(proto.relayAddress.ip, proto.relayAddress.port),
proto.natType,
proto.vendor)
nodes[node.id] = node
except Exception, e:
print e.message
conn.close()
return nodes
def delete_vendor(self, guid):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM vendors WHERE guid=?''', (guid,))
conn.commit()
conn.close()
class ModeratorStore(object):
"""
Stores a list of known moderators. A moderator must be saved here
for it to be used in a new listing.
"""
def __init__(self, database_path):
self.PATH = database_path
def save_moderator(self, guid, pubkey, bitcoin_key, bicoin_sig, name,
avatar_hash, fee, handle="", short_desc=""):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO moderators(guid, pubkey, bitcoinKey,
bitcoinSignature, handle, name, description, avatar, fee)
VALUES (?,?,?,?,?,?,?,?,?)''', (guid, pubkey, bitcoin_key, bicoin_sig, handle,
name, short_desc, avatar_hash, fee))
except Exception as e:
print e.message
conn.commit()
conn.close()
def get_moderator(self, guid):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT * FROM moderators WHERE guid=?''', (guid,))
ret = cursor.fetchone()
conn.close()
return ret
def delete_moderator(self, guid):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM moderators WHERE guid=?''', (guid,))
conn.commit()
conn.close()
def clear_all(self, except_guids=None):
if except_guids is None:
except_guids = []
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM moderators WHERE guid NOT IN ({seq})'''.format(
seq=','.join(['?']*len(except_guids))), except_guids)
conn.commit()
conn.close()
class Purchases(object):
"""
Stores a list of this node's purchases.
"""
def __init__(self, database_path):
self.PATH = database_path
def new_purchase(self, order_id, title, description, timestamp, btc,
address, status, thumbnail, vendor, proofSig, contract_type):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO purchases(id, title, description, timestamp, btc,
address, status, thumbnail, vendor, proofSig, contractType, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
(order_id, title, description, timestamp, btc, address,
status, thumbnail, vendor, proofSig, contract_type, 0))
except Exception as e:
print e.message
conn.commit()
conn.close()
def get_purchase(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
thumbnail, vendor, contractType, proofSig, unread FROM purchases WHERE id=?''', (order_id,))
ret = cursor.fetchall()
conn.close()
if not ret:
return None
else:
return ret[0]
def delete_purchase(self, order_id):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM purchases WHERE id=?''', (order_id,))
conn.commit()
conn.close()
def get_all(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, description, timestamp, btc, status,
thumbnail, vendor, contractType, unread FROM purchases ''')
ret = cursor.fetchall()
conn.close()
return ret
def get_unfunded(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, timestamp FROM purchases WHERE status=0 OR status=2''')
ret = cursor.fetchall()
conn.close()
return ret
def update_status(self, order_id, status):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE purchases SET status=? WHERE id=?;''', (status, order_id))
conn.commit()
conn.close()
def get_status(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT status FROM purchases WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
def update_unread(self, order_id, reset=False):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
if reset is False:
cursor.execute('''UPDATE purchases SET unread = unread + 1 WHERE id=?;''', (order_id,))
else:
cursor.execute('''UPDATE purchases SET unread=0 WHERE id=?;''', (order_id,))
conn.commit()
conn.close()
def update_outpoint(self, order_id, outpoint):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE purchases SET outpoint=? WHERE id=?;''', (outpoint, order_id))
conn.commit()
conn.close()
def get_outpoint(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT outpoint FROM purchases WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
def get_proof_sig(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT proofSig FROM purchases WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
class Sales(object):
"""
Stores a list of this node's sales.
"""
def __init__(self, database_path):
self.PATH = database_path
def new_sale(self, order_id, title, description, timestamp, btc,
address, status, thumbnail, buyer, contract_type):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO sales(id, title, description, timestamp, btc, address,
status, thumbnail, buyer, contractType, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
(order_id, title, description, timestamp, btc, address, status,
thumbnail, buyer, contract_type, 0))
except Exception as e:
print e.message
conn.commit()
conn.close()
def get_sale(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
thumbnail, buyer, contractType, unread FROM sales WHERE id=?''', (order_id,))
ret = cursor.fetchall()
conn.close()
if not ret:
return None
else:
return ret[0]
def delete_sale(self, order_id):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM sales WHERE id=?''', (order_id,))
conn.commit()
conn.close()
def get_all(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, description, timestamp, btc, status,
thumbnail, buyer, contractType, unread FROM sales ''')
ret = cursor.fetchall()
conn.close()
return ret
def get_unfunded(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, timestamp FROM sales WHERE status=0''')
ret = cursor.fetchall()
conn.close()
return ret
def update_status(self, order_id, status):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE sales SET status=? WHERE id=?;''', (status, order_id))
conn.commit()
conn.close()
def get_status(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT status FROM sales WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
def update_unread(self, order_id, reset=False):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
if reset is False:
cursor.execute('''UPDATE sales SET unread = unread + 1 WHERE id=?;''', (order_id,))
else:
cursor.execute('''UPDATE sales SET unread=0 WHERE id=?;''', (order_id,))
conn.commit()
conn.close()
def update_outpoint(self, order_id, outpoint):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE sales SET outpoint=? WHERE id=?;''', (outpoint, order_id))
conn.commit()
conn.close()
def update_payment_tx(self, order_id, txid):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE sales SET paymentTX=? WHERE id=?;''', (txid, order_id))
conn.commit()
conn.close()
def get_outpoint(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT outpoint FROM sales WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
class Cases(object):
"""
Stores a list of this node's moderation cases.
"""
def __init__(self, database_path):
self.PATH = database_path
def new_case(self, order_id, title, timestamp, order_date, btc,
thumbnail, buyer, vendor, validation, claim):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO cases(id, title, timestamp, orderDate, btc, thumbnail,
buyer, vendor, validation, claim, status, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
(order_id, title, timestamp, order_date, btc,
thumbnail, buyer, vendor, validation, claim, 0, 0))
except Exception as e:
print e.message
conn.commit()
conn.close()
def delete_case(self, order_id):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM cases WHERE id=?''', (order_id,))
conn.commit()
conn.close()
def get_all(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, timestamp, orderDate, btc, thumbnail,
buyer, vendor, validation, claim, status, unread FROM cases ''')
ret = cursor.fetchall()
conn.close()
return ret
def update_unread(self, order_id, reset=False):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
if reset is False:
cursor.execute('''UPDATE cases SET unread = unread + 1 WHERE id=?;''', (order_id,))
else:
cursor.execute('''UPDATE cases SET unread=0 WHERE id=?;''', (order_id,))
conn.commit()
conn.close()
def get_claim(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT claim FROM cases WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
def update_status(self, order_id, status):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE cases SET status=? WHERE id=?;''', (status, order_id))
conn.commit()
conn.close()
class Ratings(object):
    """
    Store ratings for each contract in the db.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def add_rating(self, listing_hash, rating):
        """Save a rating; its id is the hex digest of the rating itself."""
        conn = Database.connect_database(self.PATH)
        with conn:
            rating_id = digest(rating).encode("hex")
            cursor = conn.cursor()
            cursor.execute('''INSERT INTO ratings(listing, ratingID, rating) VALUES (?,?,?)''',
                           (listing_hash, rating_id, rating))
            conn.commit()
        conn.close()

    def get_listing_ratings(self, listing_hash, starting_id=None):
        """
        Return all ratings for a listing; when starting_id is given, only
        those stored after that rating. Returns None if starting_id is
        unknown.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        if starting_id is None:
            cursor.execute('''SELECT rating FROM ratings WHERE listing=?''', (listing_hash,))
            ret = cursor.fetchall()
            conn.close()
            return ret
        cursor.execute('''SELECT rowid FROM ratings WHERE ratingID=?''', (starting_id, ))
        row_id = cursor.fetchone()
        if row_id is None:
            conn.close()
            return None
        # Bind the integer rowid (row_id[0]); binding the fetched row tuple
        # itself raises sqlite3.InterfaceError.
        cursor.execute('''SELECT rating FROM ratings WHERE rowid>? AND listing=?''',
                       (row_id[0], listing_hash))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_all_ratings(self, starting_id=None):
        """
        Return every rating; when starting_id is given, only those stored
        after that rating. Returns None if starting_id is unknown.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        if starting_id is None:
            cursor.execute('''SELECT rating FROM ratings''')
            ret = cursor.fetchall()
            conn.close()
            return ret
        cursor.execute('''SELECT rowid FROM ratings WHERE ratingID=?''', (starting_id, ))
        row_id = cursor.fetchone()
        if row_id is None:
            conn.close()
            return None
        # Same tuple-binding fix as get_listing_ratings.
        cursor.execute('''SELECT rating FROM ratings WHERE rowid>?''', (row_id[0], ))
        ret = cursor.fetchall()
        conn.close()
        return ret
class Transactions(object):
    """
    Store transactions that we broadcast to the network but have yet to confirm.
    The transactions should be periodically rebroadcast to ensure they make it in the chain.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def add_transaction(self, tx):
        """Save a raw transaction for later rebroadcast."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT INTO transactions(tx) VALUES (?)''', (tx,))
            db.commit()
        db.close()

    def delete_transaction(self, tx):
        """Remove a transaction (e.g. once it has confirmed)."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM transactions WHERE tx=?''', (tx,))
            db.commit()
        db.close()

    def get_transactions(self):
        """Return every stored transaction row."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT tx FROM transactions''')
        rows = cur.fetchall()
        db.close()
        return rows
class Settings(object):
    """
    Stores the UI settings.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def update(self, refundAddress, currencyCode, country, language, timeZone, notifications,
               shipping_addresses, blocked, terms_conditions, refund_policy, moderator_list, smtp_notifications,
               smtp_server, smtp_sender, smtp_recipient, smtp_username, smtp_password):
        """Insert or replace the singleton settings row (id=1)."""
        db = Database.connect_database(self.PATH)
        with db:
            cur = db.cursor()
            cur.execute('''INSERT OR REPLACE INTO settings(id, refundAddress, currencyCode, country,
                      language, timeZone, notifications, shippingAddresses, blocked, termsConditions,
                      refundPolicy, moderatorList, smtpNotifications, smtpServer, smtpSender,
                      smtpRecipient, smtpUsername, smtpPassword) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''',
                        (1, refundAddress, currencyCode, country, language, timeZone,
                         notifications, shipping_addresses, blocked, terms_conditions,
                         refund_policy, moderator_list, smtp_notifications, smtp_server,
                         smtp_sender, smtp_recipient, smtp_username, smtp_password))
            db.commit()
        db.close()

    def get(self):
        """Return the settings row, or None if never saved."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT * FROM settings WHERE id=1''')
        row = cur.fetchone()
        db.close()
        return row

    def set_credentials(self, username, password):
        """Store the API credentials in a separate settings row (id=2)."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT OR REPLACE INTO settings(id, username, password) VALUES (?,?,?)''',
                                (2, username, password))
            db.commit()
        db.close()

    def get_credentials(self):
        """Return the (username, password) row, or None if unset."""
        db = Database.connect_database(self.PATH)
        cur = db.cursor()
        cur.execute('''SELECT username, password FROM settings WHERE id=2''')
        row = cur.fetchone()
        db.close()
        return row
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "1",
"size": "49283",
"license": "mit",
"hash": 9141307854253927000,
"line_mean": 35.9161048689,
"line_max": 116,
"alpha_frac": 0.5699531279,
"autogenerated": false,
"ratio": 4.3348579470489925,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5404811074948992,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import os
import sqlite3 as lite
import time
from api.utils import sanitize_html
from collections import Counter
from config import DATA_FOLDER
from dht.node import Node
from dht.utils import digest
from protos import objects
from protos.objects import Listings, Followers, Following
from os.path import join
from db.migrations import migration1, migration2, migration3, migration4, migration5, migration6, migration7
class Database(object):
    """
    Top-level handle to the node's sqlite database. Creates the schema on
    first run, applies pending migrations, and exposes one store object
    per table (filemap, profile, listings, ...).
    """

    __slots__ = ['PATH', 'filemap', 'profile', 'listings', 'keys', 'follow', 'messages',
                 'notifications', 'broadcasts', 'vendors', 'moderators', 'purchases', 'sales',
                 'cases', 'ratings', 'transactions', 'settings', 'audit_shopping']

    def __init__(self, testnet=False, filepath=None):
        """
        Args:
            testnet: use the testnet database file (ignored when filepath given).
            filepath: explicit database path; overrides testnet.
        """
        # Attributes are assigned via object.__setattr__ to match the
        # explicit style used with __slots__ throughout this class.
        object.__setattr__(self, 'PATH', self._database_path(testnet, filepath))
        object.__setattr__(self, 'filemap', HashMap(self.PATH))
        object.__setattr__(self, 'profile', ProfileStore(self.PATH))
        object.__setattr__(self, 'listings', ListingsStore(self.PATH))
        object.__setattr__(self, 'keys', KeyStore(self.PATH))
        object.__setattr__(self, 'follow', FollowData(self.PATH))
        object.__setattr__(self, 'messages', MessageStore(self.PATH))
        object.__setattr__(self, 'notifications', NotificationStore(self.PATH))
        object.__setattr__(self, 'broadcasts', BroadcastStore(self.PATH))
        object.__setattr__(self, 'vendors', VendorStore(self.PATH))
        object.__setattr__(self, 'moderators', ModeratorStore(self.PATH))
        object.__setattr__(self, 'purchases', Purchases(self.PATH))
        object.__setattr__(self, 'sales', Sales(self.PATH))
        object.__setattr__(self, 'cases', Cases(self.PATH))
        object.__setattr__(self, 'ratings', Ratings(self.PATH))
        object.__setattr__(self, 'transactions', Transactions(self.PATH))
        object.__setattr__(self, 'settings', Settings(self.PATH))
        object.__setattr__(self, 'audit_shopping', ShoppingEvents(self.PATH))
        self._initialize_datafolder_tree()
        self._initialize_database(self.PATH)

    def get_database_path(self):
        """Return the path of the backing sqlite file."""
        return self.PATH

    def _initialize_database(self, database_path):
        """
        Create the database file if missing, clear the pickled cache, and
        run any pending schema migrations.

        Raises:
            RuntimeError: if database_path is empty.
        """
        if not database_path:
            raise RuntimeError('attempted to initialize empty path')
        if not os.path.isfile(database_path):
            self._create_database(database_path)
        # Stale cached state must not survive a schema change.
        cache = join(DATA_FOLDER, "cache.pickle")
        if os.path.exists(cache):
            os.remove(cache)
        self._run_migrations()

    @staticmethod
    def _database_path(testnet, filepath):
        """
        Resolve the database pathname.

        Args:
            testnet: Boolean
            filepath: if provided, overrides testnet
        """
        if filepath:
            return filepath
        if testnet:
            return join(DATA_FOLDER, "OB-Testnet.db")
        return join(DATA_FOLDER, "OB-Mainnet.db")

    @staticmethod
    def connect_database(path):
        """Open a connection whose text_factory is str (blobs kept as bytes)."""
        conn = lite.connect(path)
        conn.text_factory = str
        return conn

    @staticmethod
    def _initialize_datafolder_tree():
        """
        Creates, if not present, directory tree in DATA_FOLDER.
        """
        tree = [
            ['cache'],
            ['store', 'contracts', 'listings'],
            ['store', 'contracts', 'in progress'],
            ['store', 'contracts', 'unfunded'],
            ['store', 'contracts', 'trade receipts'],
            ['store', 'media'],
            ['purchases', 'in progress'],
            ['purchases', 'unfunded'],
            ['purchases', 'trade receipts'],
            ['cases']
        ]
        for sub_tree in tree:
            path = DATA_FOLDER
            for directory in sub_tree:
                path = join(path, directory)
            if not os.path.exists(path):
                os.makedirs(path, mode=0o750)

    @staticmethod
    def _create_database(database_path):
        """Create the full schema for a brand-new database (schema version 6)."""
        conn = lite.connect(database_path)
        cursor = conn.cursor()
        # New databases start at schema version 6; _run_migrations then
        # applies migration7 (and any later ones) on top.
        cursor.execute('''PRAGMA user_version = 6''')
        cursor.execute('''CREATE TABLE hashmap(hash TEXT PRIMARY KEY, filepath TEXT)''')
        cursor.execute('''CREATE TABLE profile(id INTEGER PRIMARY KEY, serializedUserInfo BLOB, tempHandle TEXT)''')
        cursor.execute('''CREATE TABLE listings(id INTEGER PRIMARY KEY, serializedListings BLOB)''')
        cursor.execute('''CREATE TABLE keys(type TEXT PRIMARY KEY, privkey BLOB, pubkey BLOB)''')
        cursor.execute('''CREATE TABLE followers(guid TEXT UNIQUE, serializedFollower TEXT)''')
        cursor.execute('''CREATE INDEX index_followers ON followers(serializedFollower);''')
        cursor.execute('''CREATE TABLE following(id INTEGER PRIMARY KEY, serializedFollowing BLOB)''')
        cursor.execute('''CREATE TABLE messages(msgID TEXT PRIMARY KEY, guid TEXT, handle TEXT, pubkey BLOB,
subject TEXT, messageType TEXT, message TEXT, timestamp INTEGER, avatarHash BLOB, signature BLOB,
outgoing INTEGER, read INTEGER)''')
        cursor.execute('''CREATE INDEX index_guid ON messages(guid);''')
        cursor.execute('''CREATE INDEX index_subject ON messages(subject);''')
        cursor.execute('''CREATE INDEX index_messages_read ON messages(read);''')
        cursor.execute('''CREATE INDEX index_timestamp ON messages(timestamp);''')
        cursor.execute('''CREATE TABLE notifications(notifID TEXT UNIQUE, guid BLOB, handle TEXT, type TEXT,
orderId TEXT, title TEXT, timestamp INTEGER, imageHash BLOB, read INTEGER)''')
        cursor.execute('''CREATE INDEX index_notification_read ON notifications(read);''')
        cursor.execute('''CREATE TABLE broadcasts(id TEXT PRIMARY KEY, guid BLOB, handle TEXT, message TEXT,
timestamp INTEGER, avatarHash BLOB)''')
        cursor.execute('''CREATE TABLE vendors(guid TEXT PRIMARY KEY, serializedNode BLOB)''')
        cursor.execute('''CREATE TABLE moderators(guid TEXT PRIMARY KEY, pubkey BLOB, bitcoinKey BLOB,
bitcoinSignature BLOB, handle TEXT, name TEXT, description TEXT, avatar BLOB, fee FLOAT)''')
        cursor.execute('''CREATE TABLE purchases(id TEXT PRIMARY KEY, title TEXT, description TEXT,
timestamp INTEGER, btc FLOAT, address TEXT, status INTEGER, outpoint BLOB, thumbnail BLOB, vendor TEXT,
proofSig BLOB, contractType TEXT, unread INTEGER, statusChanged INTEGER)''')
        cursor.execute('''CREATE TABLE sales(id TEXT PRIMARY KEY, title TEXT, description TEXT,
timestamp INTEGER, btc REAL, address TEXT, status INTEGER, thumbnail BLOB, outpoint BLOB, buyer TEXT,
paymentTX TEXT, contractType TEXT, unread INTEGER, statusChanged INTEGER)''')
        cursor.execute('''CREATE TABLE cases(id TEXT PRIMARY KEY, title TEXT, timestamp INTEGER, orderDate TEXT,
btc REAL, thumbnail BLOB, buyer TEXT, vendor TEXT, validation TEXT, claim TEXT, status INTEGER,
unread INTEGER, statusChanged INTEGER)''')
        cursor.execute('''CREATE TABLE ratings(listing TEXT, ratingID TEXT, rating TEXT)''')
        cursor.execute('''CREATE INDEX index_listing ON ratings(listing);''')
        cursor.execute('''CREATE INDEX index_rating_id ON ratings(ratingID);''')
        cursor.execute('''CREATE TABLE transactions(tx BLOB);''')
        cursor.execute('''CREATE TABLE settings(id INTEGER PRIMARY KEY, refundAddress TEXT, currencyCode TEXT,
country TEXT, language TEXT, timeZone TEXT, notifications INTEGER, shippingAddresses BLOB, blocked BLOB,
termsConditions TEXT, refundPolicy TEXT, moderatorList BLOB, username TEXT, password TEXT,
smtpNotifications INTEGER, smtpServer TEXT, smtpSender TEXT, smtpRecipient TEXT, smtpUsername TEXT,
smtpPassword TEXT)''')
        cursor.execute('''CREATE TABLE IF NOT EXISTS audit_shopping (
audit_shopping_id integer PRIMARY KEY NOT NULL,
shopper_guid text NOT NULL,
contract_hash text NOT NULL,
"timestamp" integer NOT NULL,
action_id integer NOT NULL
);''')
        # Both indexes previously pointed at audit_shopping_id (the primary
        # key, already indexed); the index names show the intended columns.
        cursor.execute('''CREATE INDEX IF NOT EXISTS shopper_guid_index ON audit_shopping
(shopper_guid ASC);''')
        cursor.execute('''CREATE INDEX IF NOT EXISTS action_id_index ON audit_shopping (action_id ASC);''')
        conn.commit()
        conn.close()

    def _run_migrations(self):
        """
        Bring the database up to the current schema version.

        PRAGMA user_version records how many migrations have already been
        applied; every migration at an index >= that version still needs to
        run. NOTE(review): the previous if/elif chain omitted migration7
        for versions 0-4 while applying it for versions 5 and 6; the slice
        below applies the full remaining chain uniformly.
        """
        conn = self.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''PRAGMA user_version''')
        version = cursor.fetchone()[0]
        conn.close()
        migrations = (migration1, migration2, migration3, migration4,
                      migration5, migration6, migration7)
        for migration in migrations[version:]:
            migration.migrate(self.PATH)
class HashMap(object):
    """
    Maps content hashes (sent over the wire in queries) to human-readable
    file paths in local storage, so users can browse their store data on
    disk.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def insert(self, hash_value, filepath):
        """Add or overwrite the path stored under hash_value."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''INSERT OR REPLACE INTO hashmap(hash, filepath)
VALUES (?,?)''', (hash_value, filepath))
        conn.commit()
        conn.close()

    def get_file(self, hash_value):
        """Return the full path for hash_value, or None if unknown."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT filepath FROM hashmap WHERE hash=?''', (hash_value,))
            row = cursor.fetchone()
            # stored paths are relative to DATA_FOLDER
            return None if row is None else DATA_FOLDER + row[0]
        finally:
            conn.close()

    def get_all(self):
        """Return every (hash, filepath) row."""
        conn = Database.connect_database(self.PATH)
        try:
            return conn.cursor().execute('''SELECT * FROM hashmap ''').fetchall()
        finally:
            conn.close()

    def delete(self, hash_value):
        """Remove the mapping for hash_value, if present."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM hashmap WHERE hash = ?''', (hash_value,))
        conn.commit()
        conn.close()

    def delete_all(self):
        """Wipe every stored mapping."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM hashmap''')
        conn.commit()
        conn.close()
class ProfileStore(object):
    """
    Persists the user's profile as a single serialized Profile protobuf
    row. Protobuf keeps custom repeated fields (like SocialAccount)
    flexible, and the blob is served over the wire as-is so it never has
    to be rebuilt at startup. Interact with the profile through the
    `market.profile` module rather than this class directly.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def set_proto(self, proto):
        """Store the serialized profile, carrying the temp handle along."""
        conn = Database.connect_database(self.PATH)
        with conn:
            temp_handle = self.get_temp_handle()
            conn.cursor().execute('''INSERT OR REPLACE INTO profile(id, serializedUserInfo, tempHandle)
VALUES (?,?,?)''', (1, proto, temp_handle))
        conn.commit()
        conn.close()

    def get_proto(self):
        """Return the serialized profile blob, or None if unset."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT serializedUserInfo FROM profile WHERE id = 1''')
            row = cursor.fetchone()
            return None if row is None else row[0]
        finally:
            conn.close()

    def set_temp_handle(self, handle):
        """Store a handle before the full profile exists; update it after."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            if self.get_proto() is None:
                # no profile row yet: create one holding only the handle
                cursor.execute('''INSERT OR REPLACE INTO profile(id, tempHandle)
VALUES (?,?)''', (1, handle))
            else:
                cursor.execute('''UPDATE profile SET tempHandle=? WHERE id=?;''', (handle, 1))
        conn.commit()
        conn.close()

    def get_temp_handle(self):
        """Return the temp handle, or the empty string if none is stored."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT tempHandle FROM profile WHERE id = 1''')
            row = cursor.fetchone()
            return "" if row is None else row[0]
        finally:
            conn.close()
class ListingsStore(object):
    """
    Stores a single serialized `Listings` protobuf containing metadata for
    every contract hosted by this store. It is served in response to a
    GET_LISTING query and must be updated each time a contract is created
    or removed.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def add_listing(self, proto):
        """
        Append a listing to the stored Listings blob. An existing listing
        with the same contract hash is replaced.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            l = Listings()
            ser = self.get_proto()
            if ser is not None:
                l.ParseFromString(ser)
                # iterate over a copy: removing from a repeated field while
                # iterating it directly can skip elements
                for listing in list(l.listing):
                    if listing.contract_hash == proto.contract_hash:
                        l.listing.remove(listing)
            l.listing.extend([proto])
            cursor.execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
VALUES (?,?)''', (1, l.SerializeToString()))
        conn.commit()
        conn.close()

    def delete_listing(self, hash_value):
        """Remove the listing whose contract hash matches hash_value."""
        # Check for stored data before opening a write connection; the
        # previous version returned from inside the `with` block and
        # leaked the open connection.
        ser = self.get_proto()
        if ser is None:
            return
        l = Listings()
        l.ParseFromString(ser)
        # iterate over a copy (see add_listing)
        for listing in list(l.listing):
            if listing.contract_hash == hash_value:
                l.listing.remove(listing)
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''INSERT OR REPLACE INTO listings(id, serializedListings)
VALUES (?,?)''', (1, l.SerializeToString()))
        conn.commit()
        conn.close()

    def delete_all_listings(self):
        """Wipe the entire listings table."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM listings''')
        conn.commit()
        conn.close()

    def get_proto(self):
        """Return the serialized Listings blob, or None if unset."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT serializedListings FROM listings WHERE id = 1''')
            row = cursor.fetchone()
            return None if row is None else row[0]
        finally:
            conn.close()
class KeyStore(object):
    """
    Persists this node's key pairs, one row per key type.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def set_key(self, key_type, privkey, pubkey):
        """Insert or overwrite the key pair stored under key_type."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''INSERT OR REPLACE INTO keys(type, privkey, pubkey)
VALUES (?,?,?)''', (key_type, privkey, pubkey))
        conn.commit()
        conn.close()

    def get_key(self, key_type):
        """Return (privkey, pubkey) for key_type, or None if absent."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT privkey, pubkey FROM keys WHERE type=?''', (key_type,))
            row = cursor.fetchone()
            return row if row else None
        finally:
            conn.close()

    def delete_all_keys(self):
        """Wipe every stored key pair."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM keys''')
        conn.commit()
        conn.close()
class FollowData(object):
    """
    A class for saving and retrieving follower and following data for this
    node. The "following" list is one serialized Following protobuf;
    followers are one hex-encoded Follower protobuf per row.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def follow(self, proto):
        """
        Add proto (a Following user entry) to the following list, replacing
        any existing entry with the same guid.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            f = Following()
            ser = self.get_following()
            if ser is not None:
                f.ParseFromString(ser)
                # iterate over a copy: removing from a repeated field while
                # iterating it directly can skip elements
                for user in list(f.users):
                    if user.guid == proto.guid:
                        f.users.remove(user)
            f.users.extend([proto])
            cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                           (1, f.SerializeToString()))
        conn.commit()
        conn.close()

    def unfollow(self, guid):
        """Remove the entry for guid from the following list, if present."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            f = Following()
            ser = self.get_following()
            if ser is not None:
                f.ParseFromString(ser)
                # iterate over a copy (see follow)
                for user in list(f.users):
                    if user.guid == guid:
                        f.users.remove(user)
            cursor.execute('''INSERT OR REPLACE INTO following(id, serializedFollowing) VALUES (?,?)''',
                           (1, f.SerializeToString()))
        conn.commit()
        conn.close()

    def get_following(self):
        """Return the serialized Following blob, or None if unset."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT serializedFollowing FROM following WHERE id=1''')
        ret = cursor.fetchall()
        conn.close()
        if not ret:
            return None
        return ret[0][0]

    def is_following(self, guid):
        """Return True if guid is in the following list."""
        f = Following()
        ser = self.get_following()
        if ser is not None:
            f.ParseFromString(ser)
            for user in f.users:
                if user.guid == guid:
                    return True
        return False

    def set_follower(self, proto):
        """Store a serialized Follower protobuf, keyed by its guid (hex)."""
        conn = Database.connect_database(self.PATH)
        p = Followers.Follower()
        p.ParseFromString(proto)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''INSERT OR REPLACE INTO followers(guid, serializedFollower) VALUES (?,?)''',
                           (p.guid.encode("hex"), proto.encode("hex")))
        conn.commit()
        conn.close()

    def delete_follower(self, guid):
        """Remove the follower row for guid."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM followers WHERE guid=?''', (guid.encode("hex"), ))
        conn.commit()
        conn.close()

    def get_followers(self, start=0):
        """
        Return (serialized Followers protobuf, total follower count),
        paging 30 rows at a time, newest first, starting at offset `start`.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT Count(*) FROM followers''')
        count = cursor.fetchone()[0]
        f = Followers()
        if count > 0:
            # offset is bound as a parameter instead of concatenated into
            # the statement
            cursor.execute('''select serializedFollower from followers order by rowid desc limit 30 offset ?''',
                           (start,))
            serialized_followers = cursor.fetchall()
            for proto in serialized_followers:
                p = Followers.Follower()
                p.ParseFromString(proto[0].decode("hex"))
                f.followers.extend([p])
        # previously the connection leaked when count == 0
        conn.close()
        return (f.SerializeToString(), count)
class MessageStore(object):
    """
    Stores all of the chat messages for this node and allows retrieval of
    messages and conversations as well as marking as read.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_message(self, guid, handle, pubkey, subject, message_type, message,
                     timestamp, avatar_hash, signature, is_outgoing, msg_id=None):
        """
        Store a message in the database.

        Returns:
            True on success, False if the insert failed (e.g. a duplicate
            msgID, since msgID is the primary key).
        """
        try:
            conn = Database.connect_database(self.PATH)
            with conn:
                outgoing = 1 if is_outgoing else 0
                # derive a deterministic id from content + time when none given
                msgID = digest(message + str(timestamp)).encode("hex") if msg_id is None else msg_id
                cursor = conn.cursor()
                cursor.execute('''INSERT INTO messages(msgID, guid, handle, pubkey, subject,
messageType, message, timestamp, avatarHash, signature, outgoing, read) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                               (msgID, guid, handle, pubkey, subject, message_type,
                                message, timestamp, avatar_hash, signature, outgoing, 0))
            conn.commit()
            conn.close()
            return True
        except Exception:
            # deliberate best-effort: callers only inspect the boolean result
            return False

    def get_messages(self, guid, message_type, msgID=None, limit=20):
        """
        Return up to `limit` messages for guid/message_type, newest first.
        When msgID is given, only messages strictly older than that message
        are returned (used for paging backwards).
        """
        if msgID is None:
            timestamp = 4294967295  # max 32-bit timestamp: include everything
        else:
            timestamp = self.get_timestamp(msgID)
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        # LIMIT was previously hard-coded to 20, silently ignoring `limit`
        cursor.execute('''SELECT guid, handle, pubkey, subject, messageType, message,
timestamp, avatarHash, signature, outgoing, read, msgID FROM messages
WHERE guid=? AND messageType=? AND timestamp<? ORDER BY timestamp DESC LIMIT ?''',
                       (guid, message_type, timestamp, limit))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_order_messages(self, order_id):
        """
        Return all messages whose subject equals order_id.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT guid, handle, pubkey, subject, messageType, message, timestamp,
avatarHash, signature, outgoing, read FROM messages WHERE subject=? ''',
                       (order_id, ))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_conversations(self):
        """
        Get all 'conversations' composed of messages of type 'CHAT'.

        Returns:
            Array of dictionaries, one element for each guid. Dictionaries
            include last message only.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT DISTINCT guid FROM messages''',)
        guids = cursor.fetchall()
        ret = []
        unread = self.get_unread()
        for g in guids:
            cursor.execute('''SELECT avatarHash, message, max(timestamp), pubkey FROM messages
WHERE guid=? and messageType=?''', (g[0], "CHAT"))
            val = cursor.fetchone()
            if val is None or val[3] is None:
                # guid has no CHAT messages (e.g. order messages only);
                # previously crashed on val[3].encode("hex")
                continue
            avatar_hash = None
            handle = ""
            if val[0] is not None:
                try:
                    # prefer the cached profile for handle/avatar
                    with open(join(DATA_FOLDER, 'cache', g[0] + ".profile"), "r") as filename:
                        profile = filename.read()
                    p = objects.Profile()
                    p.ParseFromString(profile)
                    avatar_hash = p.avatar_hash.encode("hex")
                    handle = p.handle
                except Exception:
                    # no cached profile: fall back to an avatar stored on a message
                    cursor.execute('''SELECT avatarHash FROM messages
WHERE guid=? and messageType=? and avatarHash NOT NULL''', (g[0], "CHAT"))
                    avi = cursor.fetchone()
                    if avi[0] is not None:
                        avatar_hash = avi[0].encode("hex")
            ret.append({"guid": g[0],
                        "avatar_hash": avatar_hash,
                        "handle": handle,
                        "last_message": val[1],
                        "timestamp": val[2],
                        "public_key": val[3].encode("hex"),
                        "unread": 0 if g[0] not in unread else unread[g[0]]})
        conn.close()
        return sanitize_html(ret)

    def get_unread(self):
        """
        Get Counter of guids which have unread, incoming messages.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT guid FROM messages WHERE read=0 and outgoing=0 and subject=""''',)
        guids = cursor.fetchall()
        conn.close()
        return Counter(g[0] for g in guids)

    def get_timestamp(self, msgID):
        """Return the timestamp of CHAT message msgID, or 0 if unknown."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT timestamp FROM messages WHERE msgID=? and messageType=?''', (msgID, "CHAT"))
        row = cursor.fetchone()
        conn.close()
        # previously raised TypeError (None[0]) when msgID did not exist
        if row is None or row[0] is None:
            return 0
        return row[0]

    def mark_as_read(self, guid):
        """
        Mark all messages for guid as read.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE messages SET read=? WHERE guid=?;''', (1, guid))
        conn.commit()
        conn.close()

    def delete_messages(self, guid):
        """
        Delete all messages of type 'CHAT' for guid.
        """
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM messages WHERE guid=? AND messageType="CHAT"''', (guid, ))
        conn.commit()
        conn.close()
class NotificationStore(object):
    """
    Persists every notification raised for the user.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_notification(self, notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash):
        """Insert a new notification; new rows start unread (read=0)."""
        row = (notif_id, guid, handle, notif_type, order_id, title, timestamp, image_hash, 0)
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''INSERT INTO notifications(notifID, guid, handle, type, orderId, title, timestamp,
imageHash, read) VALUES (?,?,?,?,?,?,?,?,?)''', row)
        conn.commit()
        conn.close()

    def get_notifications(self, notif_id, limit):
        """Return up to `limit` notifications at or before notif_id's row."""
        start = self.get_row(notif_id)
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT notifID, guid, handle, type, orderId, title, timestamp,
imageHash, read FROM notifications WHERE rowid <=? AND rowid > ?''', (start, start - limit))
            return cursor.fetchall()
        finally:
            conn.close()

    def get_row(self, notif_id):
        """Return the rowid for notif_id, or the highest rowid if unknown."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT MAX(rowid) FROM notifications''')
            max_row = cursor.fetchone()[0]
            if max_row is None:
                max_row = 0
            cursor.execute('''SELECT rowid FROM notifications WHERE notifID=?''', (notif_id, ))
            row = cursor.fetchone()
            return row[0] if row else max_row
        finally:
            conn.close()

    def mark_as_read(self, notif_id):
        """Flag the notification notif_id as read."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''UPDATE notifications SET read=? WHERE notifID=?;''', (1, notif_id))
        conn.commit()
        conn.close()

    def get_unread_count(self):
        """Return how many notifications are still unread."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT notifID FROM notifications WHERE read=?''', (0, ))
            return len(cursor.fetchall())
        finally:
            conn.close()

    def delete_notification(self, notif_id):
        """Remove the notification notif_id."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM notifications WHERE notifID=?''', (notif_id,))
        conn.commit()
        conn.close()
class BroadcastStore(object):
    """
    Persists broadcast messages that our node receives.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_broadcast(self, broadcast_id, guid, handle, message, timestamp, avatar_hash):
        """Insert one broadcast row keyed by broadcast_id."""
        row = (broadcast_id, guid, handle, message, timestamp, avatar_hash)
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''INSERT INTO broadcasts(id, guid, handle, message, timestamp, avatarHash)
VALUES (?,?,?,?,?,?)''', row)
        conn.commit()
        conn.close()

    def get_broadcasts(self):
        """Return every stored broadcast row."""
        conn = Database.connect_database(self.PATH)
        try:
            cursor = conn.cursor()
            cursor.execute('''SELECT id, guid, handle, message, timestamp, avatarHash FROM broadcasts''')
            return cursor.fetchall()
        finally:
            conn.close()

    def delete_broadcast(self, broadcast_id):
        """Remove the broadcast row keyed by broadcast_id."""
        conn = Database.connect_database(self.PATH)
        with conn:
            conn.cursor().execute('''DELETE FROM broadcasts WHERE id=?''', (broadcast_id,))
        conn.commit()
        conn.close()
class VendorStore(object):
    """
    Stores a list of vendors this node has heard about. Useful for
    filling out data in the homepage.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_vendor(self, guid, serialized_node):
        # Upsert a serialized node protobuf keyed by guid.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            try:
                cursor.execute('''INSERT OR REPLACE INTO vendors(guid, serializedNode)
VALUES (?,?)''', (guid, serialized_node))
            except Exception as e:
                # best-effort: failures are only reported to stdout (Python 2 print)
                print e.message
        conn.commit()
        conn.close()

    def get_vendors(self):
        # Deserialize every stored row into a dht.node.Node, keyed by node.id.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT serializedNode FROM vendors''')
        ret = cursor.fetchall()
        nodes = {}
        for n in ret:
            try:
                proto = objects.Node()
                proto.ParseFromString(n[0])
                node = Node(proto.guid,
                            proto.nodeAddress.ip,
                            proto.nodeAddress.port,
                            proto.publicKey,
                            None if not proto.HasField("relayAddress") else
                            (proto.relayAddress.ip, proto.relayAddress.port),
                            proto.natType,
                            proto.vendor)
                nodes[node.id] = node
            except Exception, e:
                # rows that fail to parse are skipped; error only printed
                print e.message
        conn.close()
        return nodes

    def delete_vendor(self, guid):
        # Remove the vendor row for guid, if present.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM vendors WHERE guid=?''', (guid,))
        conn.commit()
        conn.close()
class ModeratorStore(object):
    """
    Stores a list of known moderators. A moderator must be saved here
    for it to be used in a new listing.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def save_moderator(self, guid, pubkey, bitcoin_key, bicoin_sig, name,
                       avatar_hash, fee, handle="", short_desc=""):
        # Upsert a moderator profile row keyed by guid.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            try:
                cursor.execute('''INSERT OR REPLACE INTO moderators(guid, pubkey, bitcoinKey,
bitcoinSignature, handle, name, description, avatar, fee)
VALUES (?,?,?,?,?,?,?,?,?)''', (guid, pubkey, bitcoin_key, bicoin_sig, handle,
                                name, short_desc, avatar_hash, fee))
            except Exception as e:
                # best-effort: failure only printed (Python 2 print)
                print e.message
        conn.commit()
        conn.close()

    def get_moderator(self, guid):
        # Return the full moderator row for guid, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT * FROM moderators WHERE guid=?''', (guid,))
        ret = cursor.fetchone()
        conn.close()
        return ret

    def delete_moderator(self, guid):
        # Remove the moderator row for guid.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM moderators WHERE guid=?''', (guid,))
        conn.commit()
        conn.close()

    def clear_all(self, except_guids=None):
        # Delete every moderator except those listed in except_guids.
        if except_guids is None:
            except_guids = []
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            # only the "?" placeholder list is formatted into the SQL;
            # the guids themselves are bound as parameters
            cursor.execute('''DELETE FROM moderators WHERE guid NOT IN ({seq})'''.format(
                seq=','.join(['?']*len(except_guids))), except_guids)
        conn.commit()
        conn.close()
class Purchases(object):
    """
    Stores a list of this node's purchases.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def new_purchase(self, order_id, title, description, timestamp, btc,
                     address, status, thumbnail, vendor, proofSig, contract_type):
        # Insert (or overwrite) a purchase row; new rows start with unread=0.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            try:
                cursor.execute('''INSERT OR REPLACE INTO purchases(id, title, description, timestamp, btc,
address, status, thumbnail, vendor, proofSig, contractType, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
                               (order_id, title, description, timestamp, btc, address,
                                status, thumbnail, vendor, proofSig, contract_type, 0))
            except Exception as e:
                # best-effort: failure only printed (Python 2 print)
                print e.message
        conn.commit()
        conn.close()

    def get_purchase(self, order_id):
        # Return the purchase row for order_id, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
thumbnail, vendor, contractType, proofSig, unread FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]

    def delete_purchase(self, order_id):
        # Remove the purchase row for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM purchases WHERE id=?''', (order_id,))
        conn.commit()
        conn.close()

    def get_all(self):
        # Return summary columns for every purchase.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, status,
thumbnail, vendor, contractType, unread, statusChanged FROM purchases ''')
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_unfunded(self):
        # Purchases awaiting funding: status 0 or 2.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, timestamp FROM purchases WHERE status=0 OR status=2''')
        ret = cursor.fetchall()
        conn.close()
        return ret

    def update_status(self, order_id, status):
        # Set the integer status code for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE purchases SET status=? WHERE id=?;''', (status, order_id))
        conn.commit()
        conn.close()

    def status_changed(self, order_id, status):
        # Record the statusChanged flag for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE purchases SET statusChanged=? WHERE id=?;''', (status, order_id))
        conn.commit()
        conn.close()

    def get_status(self, order_id):
        # Return the status code for order_id, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT status FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]

    def update_unread(self, order_id, reset=False):
        # Increment the unread counter, or zero it when reset is True.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            if reset is False:
                cursor.execute('''UPDATE purchases SET unread = unread + 1 WHERE id=?;''', (order_id,))
            else:
                cursor.execute('''UPDATE purchases SET unread=0 WHERE id=?;''', (order_id,))
        conn.commit()
        conn.close()

    def update_outpoint(self, order_id, outpoint):
        # Store the funding transaction outpoint for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE purchases SET outpoint=? WHERE id=?;''', (outpoint, order_id))
        conn.commit()
        conn.close()

    def get_outpoint(self, order_id):
        # Return the stored outpoint blob, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT outpoint FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]

    def get_proof_sig(self, order_id):
        # Return the stored proof-of-funds signature, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT proofSig FROM purchases WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]
class Sales(object):
    """
    Stores a list of this node's sales.
    """

    def __init__(self, database_path):
        self.PATH = database_path

    def new_sale(self, order_id, title, description, timestamp, btc,
                 address, status, thumbnail, buyer, contract_type):
        # Insert (or overwrite) a sale row; new rows start with unread=0.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            try:
                cursor.execute('''INSERT OR REPLACE INTO sales(id, title, description, timestamp, btc, address,
status, thumbnail, buyer, contractType, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?)''',
                               (order_id, title, description, timestamp, btc, address, status,
                                thumbnail, buyer, contract_type, 0))
            except Exception as e:
                # best-effort: failure only printed (Python 2 print)
                print e.message
        conn.commit()
        conn.close()

    def get_sale(self, order_id):
        # Return the sale row for order_id, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, address, status,
thumbnail, buyer, contractType, unread FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchall()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]

    def delete_sale(self, order_id):
        # Remove the sale row for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''DELETE FROM sales WHERE id=?''', (order_id,))
        conn.commit()
        conn.close()

    def get_all(self):
        # Return summary columns for every sale.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, status,
thumbnail, buyer, contractType, unread, statusChanged FROM sales ''')
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_by_status(self, status):
        # Return summary columns for sales with the given status code.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, title, description, timestamp, btc, status,
thumbnail, buyer, contractType, unread, statusChanged FROM sales WHERE
status=?''', (status,))
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_unfunded(self):
        # Sales awaiting funding: status 0.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT id, timestamp FROM sales WHERE status=0''')
        ret = cursor.fetchall()
        conn.close()
        return ret

    def update_status(self, order_id, status):
        # Set the integer status code for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE sales SET status=? WHERE id=?;''', (status, order_id))
        conn.commit()
        conn.close()

    def status_changed(self, order_id, status):
        # Record the statusChanged flag for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE sales SET statusChanged=? WHERE id=?;''', (status, order_id))
        conn.commit()
        conn.close()

    def get_status(self, order_id):
        # Return the status code for order_id, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT status FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]

    def update_unread(self, order_id, reset=False):
        # Increment the unread counter, or zero it when reset is True.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            if reset is False:
                cursor.execute('''UPDATE sales SET unread = unread + 1 WHERE id=?;''', (order_id,))
            else:
                cursor.execute('''UPDATE sales SET unread=0 WHERE id=?;''', (order_id,))
        conn.commit()
        conn.close()

    def update_outpoint(self, order_id, outpoint):
        # Store the funding transaction outpoint for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE sales SET outpoint=? WHERE id=?;''', (outpoint, order_id))
        conn.commit()
        conn.close()

    def update_payment_tx(self, order_id, txid):
        # Store the payment transaction id for order_id.
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            cursor.execute('''UPDATE sales SET paymentTX=? WHERE id=?;''', (txid, order_id))
        conn.commit()
        conn.close()

    def get_outpoint(self, order_id):
        # Return the stored outpoint blob, or None if unknown.
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT outpoint FROM sales WHERE id=?''', (order_id,))
        ret = cursor.fetchone()
        conn.close()
        if not ret:
            return None
        else:
            return ret[0]
class Cases(object):
"""
Stores a list of this node's moderation cases.
"""
def __init__(self, database_path):
self.PATH = database_path
def new_case(self, order_id, title, timestamp, order_date, btc,
thumbnail, buyer, vendor, validation, claim):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
try:
cursor.execute('''INSERT OR REPLACE INTO cases(id, title, timestamp, orderDate, btc, thumbnail,
buyer, vendor, validation, claim, status, unread) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)''',
(order_id, title, timestamp, order_date, btc,
thumbnail, buyer, vendor, validation, claim, 0, 0))
except Exception as e:
print e.message
conn.commit()
conn.close()
def delete_case(self, order_id):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''DELETE FROM cases WHERE id=?''', (order_id,))
conn.commit()
conn.close()
def get_all(self):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT id, title, timestamp, orderDate, btc, thumbnail,
buyer, vendor, validation, claim, status, unread, statusChanged FROM cases ''')
ret = cursor.fetchall()
conn.close()
return ret
def update_unread(self, order_id, reset=False):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
if reset is False:
cursor.execute('''UPDATE cases SET unread = unread + 1 WHERE id=?;''', (order_id,))
else:
cursor.execute('''UPDATE cases SET unread=0 WHERE id=?;''', (order_id,))
conn.commit()
conn.close()
def get_claim(self, order_id):
conn = Database.connect_database(self.PATH)
cursor = conn.cursor()
cursor.execute('''SELECT claim FROM cases WHERE id=?''', (order_id,))
ret = cursor.fetchone()
conn.close()
if not ret:
return None
else:
return ret[0]
def update_status(self, order_id, status):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE cases SET status=? WHERE id=?;''', (status, order_id))
conn.commit()
conn.close()
def status_changed(self, order_id, status):
conn = Database.connect_database(self.PATH)
with conn:
cursor = conn.cursor()
cursor.execute('''UPDATE cases SET statusChanged=? WHERE id=?;''', (status, order_id))
conn.commit()
conn.close()
class Ratings(object):
    """
    Store ratings for each contract in the db.
    """

    def __init__(self, database_path):
        # Path to the sqlite database file; a connection is opened per call.
        self.PATH = database_path

    def add_rating(self, listing_hash, rating):
        """Store `rating` for `listing_hash`, keyed by the hex digest of the rating."""
        conn = Database.connect_database(self.PATH)
        with conn:
            rating_id = digest(rating).encode("hex")
            cursor = conn.cursor()
            cursor.execute('''INSERT INTO ratings(listing, ratingID, rating) VALUES (?,?,?)''',
                           (listing_hash, rating_id, rating))
        conn.commit()
        conn.close()

    def get_listing_ratings(self, listing_hash, starting_id=None):
        """
        Return all ratings for a listing, or only those stored after the rating
        with id `starting_id`. Returns None if `starting_id` is unknown.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        if starting_id is None:
            cursor.execute('''SELECT rating FROM ratings WHERE listing=?''', (listing_hash,))
            ret = cursor.fetchall()
            conn.close()
            return ret
        else:
            cursor.execute('''SELECT rowid FROM ratings WHERE ratingID=?''', (starting_id, ))
            row_id = cursor.fetchone()
            if row_id is None:
                conn.close()
                return None
            else:
                # fetchone() returns a 1-tuple; bind the contained rowid rather
                # than the tuple itself, which sqlite3 rejects with
                # InterfaceError.
                cursor.execute('''SELECT rating FROM ratings WHERE rowid>? AND listing=?''',
                               (row_id[0], listing_hash))
                ret = cursor.fetchall()
                conn.close()
                return ret

    def get_all_ratings(self, starting_id=None):
        """
        Return every rating, or only those stored after the rating with id
        `starting_id`. Returns None if `starting_id` is unknown.
        """
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        if starting_id is None:
            cursor.execute('''SELECT rating FROM ratings''')
            ret = cursor.fetchall()
            conn.close()
            return ret
        else:
            cursor.execute('''SELECT rowid FROM ratings WHERE ratingID=?''', (starting_id, ))
            row_id = cursor.fetchone()
            if row_id is None:
                conn.close()
                return None
            else:
                # Same fix as get_listing_ratings: unpack the rowid from the
                # fetched row before binding it.
                cursor.execute('''SELECT rating FROM ratings WHERE rowid>?''', (row_id[0], ))
                ret = cursor.fetchall()
                conn.close()
                return ret
class Transactions(object):
    """
    Store transactions that we broadcast to the network but have yet to confirm.
    The transactions should be periodically rebroadcast to ensure they make it in the chain.
    """

    def __init__(self, database_path):
        # Database file location; every operation opens its own connection.
        self.PATH = database_path

    def add_transaction(self, tx):
        """Persist a raw transaction for later rebroadcast."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT INTO transactions(tx) VALUES (?)''', (tx,))
        db.commit()
        db.close()

    def delete_transaction(self, tx):
        """Drop a transaction from the rebroadcast queue."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''DELETE FROM transactions WHERE tx=?''', (tx,))
        db.commit()
        db.close()

    def get_transactions(self):
        """Return every queued transaction as a list of rows."""
        db = Database.connect_database(self.PATH)
        rows = db.cursor().execute('''SELECT tx FROM transactions''').fetchall()
        db.close()
        return rows
class Settings(object):
    """
    Stores the UI settings.
    """

    def __init__(self, database_path):
        # Database file location; every operation opens its own connection.
        self.PATH = database_path

    def update(self, refundAddress, currencyCode, country, language, timeZone, notifications,
               shipping_addresses, blocked, terms_conditions, refund_policy, moderator_list, smtp_notifications,
               smtp_server, smtp_sender, smtp_recipient, smtp_username, smtp_password):
        """Write the singleton settings row (id=1), replacing any existing one."""
        db = Database.connect_database(self.PATH)
        values = (1, refundAddress, currencyCode, country, language, timeZone,
                  notifications, shipping_addresses, blocked, terms_conditions,
                  refund_policy, moderator_list, smtp_notifications, smtp_server,
                  smtp_sender, smtp_recipient, smtp_username, smtp_password)
        with db:
            db.cursor().execute('''INSERT OR REPLACE INTO settings(id, refundAddress, currencyCode, country,
language, timeZone, notifications, shippingAddresses, blocked, termsConditions,
refundPolicy, moderatorList, smtpNotifications, smtpServer, smtpSender,
smtpRecipient, smtpUsername, smtpPassword) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''', values)
        db.commit()
        db.close()

    def get(self):
        """Return the id=1 settings row, or None if it has never been written."""
        db = Database.connect_database(self.PATH)
        row = db.cursor().execute('''SELECT * FROM settings WHERE id=1''').fetchone()
        db.close()
        return row

    def set_credentials(self, username, password):
        """Store login credentials in the reserved id=2 row."""
        db = Database.connect_database(self.PATH)
        with db:
            db.cursor().execute('''INSERT OR REPLACE INTO settings(id, username, password) VALUES (?,?,?)''',
                                (2, username, password))
        db.commit()
        db.close()

    def get_credentials(self):
        """Return the (username, password) pair from the id=2 row, or None."""
        db = Database.connect_database(self.PATH)
        creds = db.cursor().execute('''SELECT username, password FROM settings WHERE id=2''').fetchone()
        db.close()
        return creds
class ShoppingEvents(object):
    """
    Stores audit events for shoppers on your storefront
    """

    def __init__(self, database_path):
        # Path to the sqlite database file; a connection is opened per call.
        self.PATH = database_path

    def set(self, shopper_guid, action_id, contract_hash=None):
        """Record an audit event for `shopper_guid`, timestamped at insert time."""
        conn = Database.connect_database(self.PATH)
        with conn:
            cursor = conn.cursor()
            timestamp = int(time.time())
            if not contract_hash:
                contract_hash = ''
            cursor.execute('''INSERT INTO audit_shopping(shopper_guid, timestamp, contract_hash, action_id) VALUES
(?,?,?,?)''', (shopper_guid, timestamp, contract_hash, action_id))
        conn.commit()
        conn.close()

    def get(self):
        """Return every audit event row."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        cursor.execute('''SELECT * FROM audit_shopping''')
        ret = cursor.fetchall()
        conn.close()
        return ret

    def get_events_by_id(self, event_id):
        """Return all audit rows matching `event_id`."""
        conn = Database.connect_database(self.PATH)
        cursor = conn.cursor()
        # Parameters must be passed as a sequence; the original passed the bare
        # scalar, which sqlite3 rejects (or mis-binds for 1-char strings).
        cursor.execute('''SELECT * FROM audit_shopping WHERE event_id=?''', (event_id,))
        ret = cursor.fetchall()
        conn.close()
        return ret
| {
"repo_name": "OpenBazaar/OpenBazaar-Server",
"path": "db/datastore.py",
"copies": "4",
"size": "53001",
"license": "mit",
"hash": -4290268272370639000,
"line_mean": 36.1155462185,
"line_max": 116,
"alpha_frac": 0.5718382672,
"autogenerated": false,
"ratio": 4.331208629566071,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0012922567746578052,
"num_lines": 1428
} |
__author__ = 'chris'
import socket
import nacl.signing
import nacl.hash
import time
from config import SEEDS
from dht.node import Node
from dht.utils import digest
from interfaces import MessageProcessor, Multiplexer, ConnectionHandler
from log import Logger
from net.dos import BanScore
from protos.message import Message, PING, NOT_FOUND
from protos.objects import FULL_CONE
from random import shuffle
from twisted.internet import task, reactor
from twisted.internet.task import LoopingCall
from txrudp.connection import HandlerFactory, Handler, State
from txrudp.crypto_connection import CryptoConnectionFactory
from txrudp.rudp import ConnectionMultiplexer
from zope.interface.verify import verifyObject
from zope.interface import implements
class OpenBazaarProtocol(ConnectionMultiplexer):
    """
    A protocol extending the txrudp datagram protocol. This is the main protocol
    which gets passed into the twisted UDPServer. It handles the setup and tear down
    of all connections, parses messages coming off the wire and passes them off to
    the appropriate classes for processing.
    """
    implements(Multiplexer)

    def __init__(self, db, ip_address, nat_type, testnet=False, relaying=False):
        """
        Initialize the new protocol with the connection handler factory.

        Args:
            db: database wrapper; only db.vendors.get_vendors() is read here.
            ip_address: a `tuple` of the (ip address, port) of ths node.
            nat_type: NAT classification, passed through to each connection
                handler to pick its ping interval.
            testnet: run against the test network when True.
            relaying: forwarded to ConnectionMultiplexer; presumably enables
                relaying packets for other peers — confirm against txrudp docs.
        """
        self.ip_address = ip_address
        self.testnet = testnet
        self.ws = None              # websocket server, set later via set_servers()
        self.blockchain = None      # blockchain server, set later via set_servers()
        self.processors = []        # registered MessageProcessor instances
        self.relay_node = None
        self.nat_type = nat_type
        self.vendors = db.vendors.get_vendors()
        self.ban_score = BanScore(self)
        self.factory = self.ConnHandlerFactory(self.processors, nat_type, self.relay_node, self.ban_score)
        self.log = Logger(system=self)
        # Multiplexer-level loop: every 30s ask each CONNECTED handler to run
        # its own keep_alive() check.
        self.keep_alive_loop = LoopingCall(self.keep_alive)
        self.keep_alive_loop.start(30, now=False)
        ConnectionMultiplexer.__init__(self, CryptoConnectionFactory(self.factory), self.ip_address[0], relaying)

    class ConnHandler(Handler):
        # Per-connection handler: validates inbound datagrams, tracks the
        # remote node identity, and manages connection liveness.
        implements(ConnectionHandler)

        def __init__(self, processors, nat_type, relay_node, ban_score, *args, **kwargs):
            super(OpenBazaarProtocol.ConnHandler, self).__init__(*args, **kwargs)
            self.log = Logger(system=self)
            self.processors = processors
            self.connection = None
            self.node = None                # set once the first valid message arrives
            self.relay_node = relay_node
            self.ban_score = ban_score
            self.addr = None                # "ip:port" string, set in on_connection_made
            self.is_new_node = True
            self.on_connection_made()
            self.time_last_message = 0      # epoch seconds of last non-PING message
            self.remote_node_version = 1
            # Full-cone NATs keep mappings alive longer, so ping less often.
            self.ping_interval = 30 if nat_type != FULL_CONE else 300

        def on_connection_made(self):
            # The connection attribute is injected by txrudp after construction;
            # poll every 0.1s until the handshake settles.
            if self.connection is None or self.connection.state == State.CONNECTING:
                return task.deferLater(reactor, .1, self.on_connection_made)
            if self.connection.state == State.CONNECTED:
                self.addr = str(self.connection.dest_addr[0]) + ":" + str(self.connection.dest_addr[1])
                self.log.info("connected to %s" % self.addr)

        def receive_message(self, datagram):
            """Parse a serialized Message protobuf and dispatch it to processors."""
            # 166 bytes is treated as the minimum size of a valid serialized
            # Message — anything shorter cannot parse, so drop it early.
            if len(datagram) < 166:
                self.log.warning("received datagram too small from %s, ignoring" % self.addr)
                return False
            try:
                m = Message()
                m.ParseFromString(datagram)
                self.node = Node(m.sender.guid,
                                 m.sender.nodeAddress.ip,
                                 m.sender.nodeAddress.port,
                                 m.sender.publicKey,
                                 None if not m.sender.HasField("relayAddress") else
                                 (m.sender.relayAddress.ip, m.sender.relayAddress.port),
                                 m.sender.natType,
                                 m.sender.vendor)
                self.remote_node_version = m.protoVer
                if self.time_last_message == 0:
                    # First message on this connection: check the sender's
                    # proof-of-work (tail of sha512(pubkey) must be below a
                    # threshold) and that the GUID matches the key hash.
                    h = nacl.hash.sha512(m.sender.publicKey)
                    pow_hash = h[40:]
                    if int(pow_hash[:6], 16) >= 50 or m.sender.guid.encode("hex") != h[:40]:
                        raise Exception('Invalid GUID')
                for processor in self.processors:
                    if m.command in processor or m.command == NOT_FOUND:
                        processor.receive_message(m, self.node, self.connection, self.ban_score)
                # PINGs don't count as activity, otherwise keep_alive would
                # never see the connection as idle.
                if m.command != PING:
                    self.time_last_message = time.time()
            except Exception:
                # If message isn't formatted property then ignore
                self.log.warning("received an invalid message from %s, ignoring" % self.addr)
                return False

        def handle_shutdown(self):
            """Tear down this connection and, if it was our relay, pick a new one."""
            try:
                self.connection.unregister()
            except Exception:
                pass
            if self.node is None:
                # Never received a valid message; synthesize a placeholder node
                # so processors can still be notified of the timeout.
                self.node = Node(digest("null"), str(self.connection.dest_addr[0]),
                                 int(self.connection.dest_addr[1]))
            for processor in self.processors:
                processor.timeout(self.node)
            if self.addr:
                self.log.info("connection with %s terminated" % self.addr)
            if self.relay_node == (self.connection.dest_addr[0], self.connection.dest_addr[1]):
                self.log.info("Disconnected from relay node. Picking new one...")
                self.change_relay_node()

        def keep_alive(self):
            """
            Let's check that this node has been active in the last 5 minutes. If not
            and if it's not in our routing table, we don't need to keep the connection
            open. Otherwise PING it to make sure the NAT doesn't drop the mapping.
            """
            t = time.time()
            router = self.processors[0].router
            # Drop idle connections (>=300s) to nodes outside our routing table,
            # unless this connection is our relay — that one must stay up.
            if (
                self.node is not None and
                t - self.time_last_message >= 300 and
                router.isNewNode(self.node) and
                self.relay_node != (self.connection.dest_addr[0], self.connection.dest_addr[1])
            ):
                self.connection.shutdown()
                return
            if t - self.time_last_message >= self.ping_interval:
                for processor in self.processors:
                    if PING in processor and self.node is not None:
                        processor.callPing(self.node)

        def change_relay_node(self):
            """Pick a replacement relay from full-cone peers, falling back to SEEDS."""
            potential_relay_nodes = []
            for bucket in self.processors[0].router.buckets:
                for node in bucket.nodes.values():
                    # Only full-cone-NAT peers can act as relays.
                    if node.nat_type == FULL_CONE:
                        potential_relay_nodes.append((node.ip, node.port))
            if len(potential_relay_nodes) == 0:
                # No candidates in the routing table; resolve the hard-coded
                # seed hosts instead (port differs for testnet vs mainnet).
                for seed in SEEDS:
                    try:
                        potential_relay_nodes.append((socket.gethostbyname(seed[0].split(":")[0]),
                                                      28469 if self.processors[0].TESTNET else 18469))
                    except socket.gaierror:
                        pass
            shuffle(potential_relay_nodes)
            self.relay_node = potential_relay_nodes[0]
            for processor in self.processors:
                if PING in processor:
                    # Close any stale connection to the chosen relay before
                    # pinging it to establish a fresh one.
                    if (self.relay_node[0], self.relay_node[1]) in processor.multiplexer:
                        processor.multiplexer[(self.relay_node[0], self.relay_node[1])].shutdown()
                    processor.callPing(Node(digest("null"), self.relay_node[0], self.relay_node[1],
                                            relay_node=None, nat_type=FULL_CONE))

        def check_new_connection(self):
            # Returns True exactly once, on the first call after construction.
            if self.is_new_node:
                self.is_new_node = False
                return True
            else:
                return False

    class ConnHandlerFactory(HandlerFactory):
        # Factory handed to txrudp so it can build a ConnHandler per connection.

        def __init__(self, processors, nat_type, relay_node, ban_score):
            super(OpenBazaarProtocol.ConnHandlerFactory, self).__init__()
            self.processors = processors
            self.nat_type = nat_type
            self.relay_node = relay_node
            self.ban_score = ban_score

        def make_new_handler(self, *args, **kwargs):
            return OpenBazaarProtocol.ConnHandler(self.processors, self.nat_type, self.relay_node, self.ban_score)

    def register_processor(self, processor):
        """Add a new class which implements the `MessageProcessor` interface."""
        if verifyObject(MessageProcessor, processor):
            self.processors.append(processor)

    def unregister_processor(self, processor):
        """Unregister the given processor."""
        if processor in self.processors:
            self.processors.remove(processor)

    def set_servers(self, ws, blockchain):
        # Late-bound references to the websocket and blockchain servers.
        self.ws = ws
        self.blockchain = blockchain

    def keep_alive(self):
        # Driven by keep_alive_loop: delegate to each live connection's handler.
        for connection in self.values():
            if connection.state == State.CONNECTED:
                connection.handler.keep_alive()

    def send_message(self, datagram, address, relay_addr):
        """
        Sends a datagram over the wire to the given address. It will create a new rudp connection if one
        does not already exist for this peer.

        Args:
            datagram: the raw data to send over the wire
            address: a `tuple` of (ip address, port) of the recipient.
            relay_addr: a `tuple` of (ip address, port) of the relay address
                or `None` if no relaying is required.
        """
        if address not in self:
            con = self.make_new_connection(self.ip_address, address, relay_addr)
        else:
            con = self[address]
        # Update the relay address on an existing connection if it changed
        # (but never relay through ourselves).
        if relay_addr is not None and relay_addr != con.relay_addr and relay_addr != con.own_addr:
            con.set_relay_address(relay_addr)
        con.send_message(datagram)
| {
"repo_name": "saltduck/OpenBazaar-Server",
"path": "net/wireprotocol.py",
"copies": "5",
"size": "10124",
"license": "mit",
"hash": -4295640857107002400,
"line_mean": 42.8268398268,
"line_max": 114,
"alpha_frac": 0.5776372975,
"autogenerated": false,
"ratio": 4.337617823479006,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0016140264358656437,
"num_lines": 231
} |
__author__ = 'chris'
import time
import json
import os.path
import nacl.signing
import nacl.hash
import nacl.encoding
import nacl.utils
from nacl.public import PrivateKey, PublicKey, Box
from dht import node
from twisted.internet import defer, reactor, task
from market.protocol import MarketProtocol
from dht.utils import digest, deferredDict
from constants import DATA_FOLDER
from protos import objects
from db.datastore import FollowData
from market.profile import Profile
from collections import OrderedDict
from binascii import hexlify, unhexlify
class Server(object):
    # High-level market message layer on top of the Kademlia DHT.
    # NOTE(review): throughout this class `signed_pubkey[64:]` is used as the
    # raw verify key — the layout appears to be 64-byte signature followed by
    # the public key; confirm against the node/key generation code.

    def __init__(self, kserver, signing_key):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.protocol = MarketProtocol(kserver.node.getProto(), self.router, signing_key)

    def get_contract(self, node_to_ask, contract_hash):
        """
        Will query the given node to fetch a contract given its hash.
        If the returned contract doesn't have the same hash, it will return None.
        After acquiring the contract it will download all the associated images if it
        does not already have them in cache.

        Args:
            node_to_ask: a `dht.node.Node` object containing an ip and port
            contract_hash: a 20 byte hash in raw byte format
        """
        def get_result(result):
            # result[1][0] is the raw contract JSON returned by the peer.
            if digest(result[1][0]) == contract_hash:
                contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
                try:
                    # Verify the vendor's GUID signature over the listing.
                    signature = contract["vendor_offer"]["signatures"]["guid"]
                    pubkey = node_to_ask.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(json.dumps(contract["vendor_offer"]["listing"], indent=4),
                                      unhexlify(signature))
                except Exception:
                    return None
                self.cache(result[1][0])
                # NOTE(review): image hashes are read from "vendor" here but the
                # signature check above used "vendor_offer" — verify which key
                # the contract schema actually uses.
                if "image_hashes" in contract["vendor"]["listing"]["item"]:
                    for image_hash in contract["vendor"]["listing"]["item"]["image_hashes"]:
                        self.get_image(node_to_ask, unhexlify(image_hash))
                return contract
            else:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContract(node_to_ask, contract_hash)
        return d.addCallback(get_result)

    def get_image(self, node_to_ask, image_hash):
        """
        Will query the given node to fetch an image given its hash.
        If the returned image doesn't have the same hash, it will return None.

        Args:
            node_to_ask: a `dht.node.Node` object containing an ip and port
            image_hash: a 20 byte hash in raw byte format
        """
        def get_result(result):
            # Only accept the image if its digest matches what we asked for.
            if digest(result[1][0]) == image_hash:
                self.cache(result[1][0])
                return result[1][0]
            else:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetImage(node_to_ask, image_hash)
        return d.addCallback(get_result)

    def get_profile(self, node_to_ask):
        """
        Downloads the profile from the given node. If the images do not already
        exist in cache, it will download and cache them before returning the profile.
        """
        def get_result(result):
            try:
                # result[1][0] is the serialized profile, result[1][1] its signature.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                p = objects.Profile()
                p.ParseFromString(result[1][0])
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.avatar_hash)):
                    self.get_image(node_to_ask, p.avatar_hash)
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.header_hash)):
                    self.get_image(node_to_ask, p.header_hash)
                return p
            except Exception:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetProfile(node_to_ask)
        return d.addCallback(get_result)

    def get_user_metadata(self, node_to_ask):
        """
        Downloads just a small portion of the profile (containing the name, handle,
        and avatar hash). We need this for some parts of the UI where we list stores.
        Since we need fast loading we shouldn't download the full profile here.
        It will download the avatar if it isn't already in cache.
        """
        def get_result(result):
            try:
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                m = objects.Metadata()
                m.ParseFromString(result[1][0])
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(m.avatar_hash)):
                    self.get_image(node_to_ask, m.avatar_hash)
                return m
            except Exception:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetUserMetadata(node_to_ask)
        return d.addCallback(get_result)

    def get_listings(self, node_to_ask):
        """
        Queries a store for it's list of contracts. A `objects.Listings` protobuf
        is returned containing some metadata for each contract. The individual contracts
        should be fetched with a get_contract call.
        """
        def get_result(result):
            try:
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                l = objects.Listings()
                l.ParseFromString(result[1][0])
                return l
            except Exception:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetListings(node_to_ask)
        return d.addCallback(get_result)

    def get_contract_metadata(self, node_to_ask, contract_hash):
        """
        Downloads just the metadata for the contract. Useful for displaying
        search results in a list view without downloading the entire contract.
        It will download the thumbnail image if it isn't already in cache.
        """
        def get_result(result):
            try:
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                l = objects.Listings().ListingMetadata()
                l.ParseFromString(result[1][0])
                if l.HasField("thumbnail_hash"):
                    if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(l.thumbnail_hash)):
                        self.get_image(node_to_ask, l.thumbnail_hash)
                return l
            except Exception:
                return None

        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContractMetadata(node_to_ask, contract_hash)
        return d.addCallback(get_result)

    def get_moderators(self):
        """
        Retrieves moderator list from the dht. Each node is queried
        to get metadata and ensure it's alive for usage.
        """
        def parse_response(moderators):
            if moderators is None:
                return None

            def parse_profiles(responses):
                # Prune moderators whose profile fetch failed.
                # (Python 2: items() is a list, so deleting while iterating is safe.)
                for k, v in responses.items():
                    if v is None:
                        del responses[k]
                return responses

            ds = {}
            for mod in moderators:
                try:
                    val = objects.Value()
                    val.ParseFromString(mod)
                    n = objects.Node()
                    n.ParseFromString(val.serializedData)
                    ds[val.serializedData] = self.get_profile(node.Node(n.guid, n.ip, n.port, n.signedPublicKey))
                except Exception:
                    pass
            return deferredDict(ds).addCallback(parse_profiles)

        return self.kserver.get("moderators").addCallback(parse_response)

    def make_moderator(self):
        """
        Set self as a moderator in the DHT.
        """
        proto = self.kserver.node.getProto().SerializeToString()
        self.kserver.set("moderators", digest(proto), proto)

    def unmake_moderator(self):
        """
        Deletes our moderator entry from the network.
        """
        key = digest(self.kserver.node.getProto().SerializeToString())
        # The first 64 bytes of a nacl signed message are the signature itself.
        signature = self.signing_key.sign(key)[:64]
        self.kserver.delete("moderators", key, signature)

    def follow(self, node_to_follow):
        """
        Sends a follow message to another node in the network. The node must be online
        to receive the message. The message contains a signed, serialized `Follower`
        protobuf object which the recipient will store and can send to other nodes,
        proving you are following them. The response is a signed `Metadata` protobuf
        that will store in the db.
        """
        def save_to_db(result):
            if result[0] and result[1][0] == "True":
                try:
                    u = objects.Following.User()
                    u.guid = node_to_follow.id
                    u.signed_pubkey = node_to_follow.signed_pubkey
                    m = objects.Metadata()
                    m.ParseFromString(result[1][1])
                    u.metadata.MergeFrom(m)
                    u.signature = result[1][2]
                    # Verify the peer signed the metadata before persisting it.
                    pubkey = node_to_follow.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(result[1][1], result[1][2])
                    FollowData().follow(u)
                    return True
                except Exception:
                    return False
            else:
                return False

        proto = Profile().get(False)
        m = objects.Metadata()
        m.name = proto.name
        m.handle = proto.handle
        m.avatar_hash = proto.avatar_hash
        m.nsfw = proto.nsfw
        f = objects.Followers.Follower()
        f.guid = self.kserver.node.id
        f.following = node_to_follow.id
        f.signed_pubkey = self.kserver.node.signed_pubkey
        f.metadata.MergeFrom(m)
        signature = self.signing_key.sign(f.SerializeToString())[:64]
        d = self.protocol.callFollow(node_to_follow, f.SerializeToString(), signature)
        return d.addCallback(save_to_db)

    def unfollow(self, node_to_unfollow):
        """
        Sends an unfollow message to a node and removes them from our db.
        """
        def save_to_db(result):
            if result[0] and result[1][0] == "True":
                FollowData().unfollow(node_to_unfollow.id)
                return True
            else:
                return False

        signature = self.signing_key.sign("unfollow:" + node_to_unfollow.id)[:64]
        d = self.protocol.callUnfollow(node_to_unfollow, signature)
        return d.addCallback(save_to_db)

    def get_followers(self, node_to_ask):
        """
        Query the given node for a list if its followers. The response will be a
        `Followers` protobuf object. We will verify the signature for each follower
        to make sure that node really did follower this user.
        """
        def get_response(response):
            # Verify the signature on the response
            f = objects.Followers()
            try:
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(response[1][1] + response[1][0])
                f.ParseFromString(response[1][0])
            except Exception:
                return None
            # Verify the signature and guid of each follower.
            for follower in f.followers:
                try:
                    v_key = nacl.signing.VerifyKey(follower.signed_pubkey[64:])
                    signature = follower.signature
                    # The signature covers the message with its signature field
                    # cleared, so strip it before verifying.
                    follower.ClearField("signature")
                    v_key.verify(follower.SerializeToString(), signature)
                    # Proof-of-work / GUID check on the follower's key (hex
                    # digest here, unlike the raw-byte check in wireprotocol).
                    h = nacl.hash.sha512(follower.signed_pubkey)
                    pow_hash = h[64:128]
                    if int(pow_hash[:6], 16) >= 50 or hexlify(follower.guid) != h[:40]:
                        raise Exception('Invalid GUID')
                    if follower.following != node_to_ask.id:
                        raise Exception('Invalid follower')
                except Exception:
                    f.followers.remove(follower)
            return f

        d = self.protocol.callGetFollowers(node_to_ask)
        return d.addCallback(get_response)

    def get_following(self, node_to_ask):
        """
        Query the given node for a list of users it's following. The return
        is `Following` protobuf object that contains signed metadata for each
        user this node is following. The signature on the metadata is there to
        prevent this node from altering the name/handle/avatar associated with
        the guid.
        """
        def get_response(response):
            # Verify the signature on the response
            f = objects.Following()
            try:
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(response[1][1] + response[1][0])
                f.ParseFromString(response[1][0])
            except Exception:
                return None
            # Drop any user whose metadata signature or GUID fails to verify.
            for user in f.users:
                try:
                    v_key = nacl.signing.VerifyKey(user.signed_pubkey[64:])
                    signature = user.signature
                    v_key.verify(user.metadata.SerializeToString(), signature)
                    h = nacl.hash.sha512(user.signed_pubkey)
                    pow_hash = h[64:128]
                    if int(pow_hash[:6], 16) >= 50 or hexlify(user.guid) != h[:40]:
                        raise Exception('Invalid GUID')
                except Exception:
                    f.users.remove(user)
            return f

        d = self.protocol.callGetFollowing(node_to_ask)
        return d.addCallback(get_response)

    def send_notification(self, message):
        """
        Sends a notification message to all online followers. It will resolve
        each guid before sending the notification. Messages must be less than
        140 characters. Returns the number of followers the notification reached.
        """
        if len(message) > 140:
            return defer.succeed(0)

        def send(nodes):
            def how_many_reached(responses):
                # Count followers that acknowledged receipt with "True".
                count = 0
                for resp in responses:
                    if resp[1][0] and resp[1][1][0] == "True":
                        count += 1
                return count

            ds = []
            signature = self.signing_key.sign(str(message))[:64]
            for n in nodes:
                # n is a (guid, node-or-None) pair from kserver.resolve.
                if n[1] is not None:
                    ds.append(self.protocol.callNotify(n[1], message, signature))
            return defer.DeferredList(ds).addCallback(how_many_reached)

        dl = []
        f = objects.Followers()
        f.ParseFromString(FollowData().get_followers())
        for follower in f.followers:
            dl.append(self.kserver.resolve(follower.guid))
        return defer.DeferredList(dl).addCallback(send)

    def send_message(self, receiving_node, public_key, message_type, message, subject=None):
        """
        Sends a message to another node. If the node isn't online it
        will be placed in the dht for the node to pick up later.
        """
        pro = Profile().get()
        # Hard cap on message size; oversize messages are silently dropped.
        if len(message) > 1500:
            return
        p = objects.Plaintext_Message()
        p.sender_guid = self.kserver.node.id
        p.signed_pubkey = self.kserver.node.signed_pubkey
        # Derive our Curve25519 encryption key from the Ed25519 signing seed.
        p.encryption_pubkey = PrivateKey(self.signing_key.encode()).public_key.encode()
        p.type = message_type
        p.message = message
        if subject is not None:
            p.subject = subject
        if pro.handle:
            p.handle = pro.handle
        if pro.avatar_hash:
            p.avatar_hash = pro.avatar_hash
        p.timestamp = int(time.time())
        signature = self.signing_key.sign(p.SerializeToString())[:64]
        p.signature = signature
        # Encrypt with an ephemeral keypair so only the recipient can read it.
        skephem = PrivateKey.generate()
        pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
        box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
        nonce = nacl.utils.random(Box.NONCE_SIZE)
        ciphertext = box.encrypt(p.SerializeToString(), nonce)

        def get_response(response):
            # Direct delivery failed: park the ciphertext in the DHT under the
            # recipient's id for them to collect later.
            if not response[0]:
                self.kserver.set(receiving_node.id, pkephem, ciphertext)

        self.protocol.callMessage(receiving_node, pkephem, ciphertext).addCallback(get_response)

    def get_messages(self, listener):
        # Fetch queued offline messages addressed to us from the DHT, decrypt,
        # verify, hand each to `listener`, then delete them from the DHT.
        # if the transport hasn't been initialized yet, wait a second
        if self.protocol.multiplexer is None or self.protocol.multiplexer.transport is None:
            return task.deferLater(reactor, 1, self.get_messages, listener)

        def parse_messages(messages):
            if messages is not None:
                for message in messages:
                    try:
                        value = objects.Value()
                        value.ParseFromString(message)
                        try:
                            # value.valueKey holds the sender's ephemeral public key.
                            box = Box(PrivateKey(self.signing_key.encode()), PublicKey(value.valueKey))
                            ciphertext = value.serializedData
                            plaintext = box.decrypt(ciphertext)
                            p = objects.Plaintext_Message()
                            p.ParseFromString(plaintext)
                            signature = p.signature
                            p.ClearField("signature")
                            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
                            verify_key.verify(p.SerializeToString(), signature)
                            h = nacl.hash.sha512(p.signed_pubkey)
                            pow_hash = h[64:128]
                            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40]:
                                raise Exception('Invalid guid')
                            listener.notify(p.sender_guid, p.encryption_pubkey, p.subject,
                                            objects.Plaintext_Message.Type.Name(p.type), p.message)
                        except Exception:
                            pass
                        # Remove the message from the DHT whether or not it
                        # verified, so it isn't re-delivered.
                        signature = self.signing_key.sign(value.valueKey)[:64]
                        self.kserver.delete(self.kserver.node.id, value.valueKey, signature)
                    except Exception:
                        pass

        self.kserver.get(self.kserver.node.id).addCallback(parse_messages)

    @staticmethod
    def cache(filename):
        """
        Saves the file to a cache folder if it doesn't already exist.
        """
        # `filename` is actually the file CONTENT; the cache file name is the
        # hex digest of that content.
        if not os.path.isfile(DATA_FOLDER + "cache/" + digest(filename).encode("hex")):
            with open(DATA_FOLDER + "cache/" + digest(filename).encode("hex"), 'w') as outfile:
                outfile.write(filename)
| {
"repo_name": "eXcomm/OpenBazaar-Server",
"path": "market/network.py",
"copies": "2",
"size": "20020",
"license": "mit",
"hash": 8622630961626333000,
"line_mean": 40.5352697095,
"line_max": 113,
"alpha_frac": 0.5605894106,
"autogenerated": false,
"ratio": 4.367364746945899,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5927954157545899,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import time
import json
import os.path
import nacl.signing
import nacl.hash
import nacl.encoding
import nacl.utils
import gnupg
from nacl.public import PrivateKey, PublicKey, Box
from dht import node
from twisted.internet import defer, reactor, task
from market.protocol import MarketProtocol
from dht.utils import digest, deferredDict
from constants import DATA_FOLDER
from protos import objects
from db.datastore import FollowData
from market.profile import Profile
from collections import OrderedDict
from binascii import hexlify, unhexlify
class Server(object):
    def __init__(self, kserver, signing_key):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.

        Args:
            kserver: the running Kademlia server, used for DHT get/set/delete.
            signing_key: this node's nacl signing key.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.protocol = MarketProtocol(kserver.node.getProto(), self.router, signing_key)
    def get_contract(self, node_to_ask, contract_hash):
        """
        Will query the given node to fetch a contract given its hash.
        If the returned contract doesn't have the same hash, it will return None.
        After acquiring the contract it will download all the associated images if it
        does not already have them in cache.

        Args:
            node_to_ask: a `dht.node.Node` object containing an ip and port
            contract_hash: a 20 byte hash in raw byte format
        """
        def get_result(result):
            if digest(result[1][0]) == contract_hash:
                contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
                try:
                    # The vendor signs the canonical (indent=4) JSON of the
                    # listing; verify with the key from the signed pubkey blob.
                    signature = contract["vendor_offer"]["signature"]
                    pubkey = node_to_ask.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(json.dumps(contract["vendor_offer"]["listing"], indent=4),
                                      unhexlify(signature))
                except Exception:
                    return None
                self.cache(result[1][0])
                # Kick off background downloads of any listing images.
                if "image_hashes" in contract["vendor_offer"]["listing"]["item"]:
                    for image_hash in contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                        self.get_image(node_to_ask, unhexlify(image_hash))
                return contract
            else:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContract(node_to_ask, contract_hash)
        return d.addCallback(get_result)
def get_image(self, node_to_ask, image_hash):
"""
Will query the given node to fetch an image given its hash.
If the returned image doesn't have the same hash, it will return None.
Args:
node_to_ask: a `dht.node.Node` object containing an ip and port
image_hash: a 20 byte hash in raw byte format
"""
def get_result(result):
if digest(result[1][0]) == image_hash:
self.cache(result[1][0])
return result[1][0]
else:
return None
if node_to_ask.ip is None:
return defer.succeed(None)
d = self.protocol.callGetImage(node_to_ask, image_hash)
return d.addCallback(get_result)
    def get_profile(self, node_to_ask):
        """
        Downloads the profile from the given node. If the images do not already
        exist in cache, it will download and cache them before returning the profile.
        """
        def get_result(result):
            try:
                # result[1] is (serialized profile, signature); verify before parsing.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                p = objects.Profile()
                p.ParseFromString(result[1][0])
                if p.pgp_key:
                    # Strip the PGP key unless its signature verifies and
                    # embeds this node's guid.
                    gpg = gnupg.GPG()
                    gpg.import_keys(p.pgp_key.publicKey)
                    if not gpg.verify(p.pgp_key.signature) or \
                                    node_to_ask.id.encode('hex') not in p.pgp_key.signature:
                        p.ClearField("pgp_key")
                # Fetch avatar/header images in the background if uncached.
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.avatar_hash)):
                    self.get_image(node_to_ask, p.avatar_hash)
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.header_hash)):
                    self.get_image(node_to_ask, p.header_hash)
                return p
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetProfile(node_to_ask)
        return d.addCallback(get_result)
    def get_user_metadata(self, node_to_ask):
        """
        Downloads just a small portion of the profile (containing the name, handle,
        and avatar hash). We need this for some parts of the UI where we list stores.
        Since we need fast loading we shouldn't download the full profile here.
        It will download the avatar if it isn't already in cache.
        """
        def get_result(result):
            try:
                # result[1] is (serialized metadata, signature); verify first.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                m = objects.Metadata()
                m.ParseFromString(result[1][0])
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(m.avatar_hash)):
                    self.get_image(node_to_ask, m.avatar_hash)
                return m
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetUserMetadata(node_to_ask)
        return d.addCallback(get_result)
def get_listings(self, node_to_ask):
"""
Queries a store for it's list of contracts. A `objects.Listings` protobuf
is returned containing some metadata for each contract. The individual contracts
should be fetched with a get_contract call.
"""
def get_result(result):
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(result[1][1] + result[1][0])
l = objects.Listings()
l.ParseFromString(result[1][0])
return l
except Exception:
return None
if node_to_ask.ip is None:
return defer.succeed(None)
d = self.protocol.callGetListings(node_to_ask)
return d.addCallback(get_result)
    def get_contract_metadata(self, node_to_ask, contract_hash):
        """
        Downloads just the metadata for the contract. Useful for displaying
        search results in a list view without downloading the entire contract.
        It will download the thumbnail image if it isn't already in cache.
        """
        def get_result(result):
            try:
                # result[1] is (serialized metadata, signature); verify first.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                l = objects.Listings().ListingMetadata()
                l.ParseFromString(result[1][0])
                if l.HasField("thumbnail_hash"):
                    if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(l.thumbnail_hash)):
                        self.get_image(node_to_ask, l.thumbnail_hash)
                return l
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContractMetadata(node_to_ask, contract_hash)
        return d.addCallback(get_result)
    def get_moderators(self):
        """
        Retrieves moderator list from the dht. Each node is queried
        to get metadata and ensure it's alive for usage.
        """
        def parse_response(moderators):
            if moderators is None:
                return None
            def parse_profiles(responses):
                # Drop moderators whose profile fetch failed.
                # (Safe in Python 2: .items() returns a list snapshot.)
                for k, v in responses.items():
                    if v is None:
                        del responses[k]
                return responses
            ds = {}
            for mod in moderators:
                try:
                    val = objects.Value()
                    val.ParseFromString(mod)
                    n = objects.Node()
                    n.ParseFromString(val.serializedData)
                    # Fetch each moderator's profile in parallel, keyed by
                    # its serialized node proto.
                    ds[val.serializedData] = self.get_profile(node.Node(n.guid, n.ip, n.port, n.signedPublicKey))
                except Exception:
                    pass
            return deferredDict(ds).addCallback(parse_profiles)
        return self.kserver.get("moderators").addCallback(parse_response)
def make_moderator(self):
"""
Set self as a moderator in the DHT.
"""
proto = self.kserver.node.getProto().SerializeToString()
self.kserver.set("moderators", digest(proto), proto)
def unmake_moderator(self):
"""
Deletes our moderator entry from the network.
"""
key = digest(self.kserver.node.getProto().SerializeToString())
signature = self.signing_key.sign(key)[:64]
self.kserver.delete("moderators", key, signature)
    def follow(self, node_to_follow):
        """
        Sends a follow message to another node in the network. The node must be online
        to receive the message. The message contains a signed, serialized `Follower`
        protobuf object which the recipient will store and can send to other nodes,
        proving you are following them. The response is a signed `Metadata` protobuf
        that will store in the db.
        """
        def save_to_db(result):
            # result[1] is expected to be (ack, serialized metadata, signature).
            if result[0] and result[1][0] == "True":
                try:
                    u = objects.Following.User()
                    u.guid = node_to_follow.id
                    u.signed_pubkey = node_to_follow.signed_pubkey
                    m = objects.Metadata()
                    m.ParseFromString(result[1][1])
                    u.metadata.MergeFrom(m)
                    u.signature = result[1][2]
                    # Verify the metadata signature before persisting it.
                    pubkey = node_to_follow.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(result[1][1], result[1][2])
                    FollowData().follow(u)
                    return True
                except Exception:
                    return False
            else:
                return False
        # Attach our own profile metadata so the followee can display us.
        proto = Profile().get(False)
        m = objects.Metadata()
        m.name = proto.name
        m.handle = proto.handle
        m.avatar_hash = proto.avatar_hash
        m.nsfw = proto.nsfw
        f = objects.Followers.Follower()
        f.guid = self.kserver.node.id
        f.following = node_to_follow.id
        f.signed_pubkey = self.kserver.node.signed_pubkey
        f.metadata.MergeFrom(m)
        signature = self.signing_key.sign(f.SerializeToString())[:64]
        d = self.protocol.callFollow(node_to_follow, f.SerializeToString(), signature)
        return d.addCallback(save_to_db)
def unfollow(self, node_to_unfollow):
"""
Sends an unfollow message to a node and removes them from our db.
"""
def save_to_db(result):
if result[0] and result[1][0] == "True":
FollowData().unfollow(node_to_unfollow.id)
return True
else:
return False
signature = self.signing_key.sign("unfollow:" + node_to_unfollow.id)[:64]
d = self.protocol.callUnfollow(node_to_unfollow, signature)
return d.addCallback(save_to_db)
def get_followers(self, node_to_ask):
"""
Query the given node for a list if its followers. The response will be a
`Followers` protobuf object. We will verify the signature for each follower
to make sure that node really did follower this user.
"""
def get_response(response):
# Verify the signature on the response
f = objects.Followers()
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(response[1][1] + response[1][0])
f.ParseFromString(response[1][0])
except Exception:
return None
# Verify the signature and guid of each follower.
for follower in f.followers:
try:
v_key = nacl.signing.VerifyKey(follower.signed_pubkey[64:])
signature = follower.signature
follower.ClearField("signature")
v_key.verify(follower.SerializeToString(), signature)
h = nacl.hash.sha512(follower.signed_pubkey)
pow_hash = h[64:128]
if int(pow_hash[:6], 16) >= 50 or hexlify(follower.guid) != h[:40]:
raise Exception('Invalid GUID')
if follower.following != node_to_ask.id:
raise Exception('Invalid follower')
except Exception:
f.followers.remove(follower)
return f
d = self.protocol.callGetFollowers(node_to_ask)
return d.addCallback(get_response)
def get_following(self, node_to_ask):
"""
Query the given node for a list of users it's following. The return
is `Following` protobuf object that contains signed metadata for each
user this node is following. The signature on the metadata is there to
prevent this node from altering the name/handle/avatar associated with
the guid.
"""
def get_response(response):
# Verify the signature on the response
f = objects.Following()
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(response[1][1] + response[1][0])
f.ParseFromString(response[1][0])
except Exception:
return None
for user in f.users:
try:
v_key = nacl.signing.VerifyKey(user.signed_pubkey[64:])
signature = user.signature
v_key.verify(user.metadata.SerializeToString(), signature)
h = nacl.hash.sha512(user.signed_pubkey)
pow_hash = h[64:128]
if int(pow_hash[:6], 16) >= 50 or hexlify(user.guid) != h[:40]:
raise Exception('Invalid GUID')
except Exception:
f.users.remove(user)
return f
d = self.protocol.callGetFollowing(node_to_ask)
return d.addCallback(get_response)
    def send_notification(self, message):
        """
        Sends a notification message to all online followers. It will resolve
        each guid before sending the notification. Messages must be less than
        140 characters. Returns the number of followers the notification reached.
        """
        if len(message) > 140:
            return defer.succeed(0)
        def send(nodes):
            def how_many_reached(responses):
                count = 0
                for resp in responses:
                    if resp[1][0] and resp[1][1][0] == "True":
                        count += 1
                return count
            ds = []
            signature = self.signing_key.sign(str(message))[:64]
            for n in nodes:
                # n is a (success, node) pair from the resolve DeferredList;
                # skip guids that failed to resolve.
                if n[1] is not None:
                    ds.append(self.protocol.callNotify(n[1], message, signature))
            return defer.DeferredList(ds).addCallback(how_many_reached)
        dl = []
        f = objects.Followers()
        f.ParseFromString(FollowData().get_followers())
        for follower in f.followers:
            dl.append(self.kserver.resolve(follower.guid))
        return defer.DeferredList(dl).addCallback(send)
    def send_message(self, receiving_node, public_key, message_type, message, subject=None):
        """
        Sends a message to another node. If the node isn't online it
        will be placed in the dht for the node to pick up later.
        """
        pro = Profile().get()
        # Messages over 1500 characters are silently dropped.
        if len(message) > 1500:
            return
        p = objects.Plaintext_Message()
        p.sender_guid = self.kserver.node.id
        p.signed_pubkey = self.kserver.node.signed_pubkey
        p.encryption_pubkey = PrivateKey(self.signing_key.encode()).public_key.encode()
        p.type = message_type
        p.message = message
        if subject is not None:
            p.subject = subject
        if pro.handle:
            p.handle = pro.handle
        if pro.avatar_hash:
            p.avatar_hash = pro.avatar_hash
        p.timestamp = int(time.time())
        signature = self.signing_key.sign(p.SerializeToString())[:64]
        p.signature = signature
        # Encrypt to the recipient with an ephemeral keypair; the ephemeral
        # public key travels alongside the ciphertext.
        skephem = PrivateKey.generate()
        pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
        box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
        nonce = nacl.utils.random(Box.NONCE_SIZE)
        ciphertext = box.encrypt(p.SerializeToString(), nonce)
        def get_response(response):
            # Direct delivery failed: park the message in the DHT for pickup.
            if not response[0]:
                self.kserver.set(receiving_node.id, pkephem, ciphertext)
        self.protocol.callMessage(receiving_node, pkephem, ciphertext).addCallback(get_response)
    def get_messages(self, listener):
        """
        Fetch our stored offline messages from the DHT, verify each one, hand
        the valid ones to `listener`, and delete every entry from the DHT.
        """
        # If the transport hasn't been initialized yet, wait a second and retry.
        if self.protocol.multiplexer is None or self.protocol.multiplexer.transport is None:
            return task.deferLater(reactor, 1, self.get_messages, listener)
        def parse_messages(messages):
            if messages is not None:
                for message in messages:
                    try:
                        value = objects.Value()
                        value.ParseFromString(message)
                        try:
                            # valueKey carries the sender's ephemeral public key;
                            # decrypt with our static private key.
                            box = Box(PrivateKey(self.signing_key.encode()), PublicKey(value.valueKey))
                            ciphertext = value.serializedData
                            plaintext = box.decrypt(ciphertext)
                            p = objects.Plaintext_Message()
                            p.ParseFromString(plaintext)
                            # The signature covers the message with its
                            # signature field cleared.
                            signature = p.signature
                            p.ClearField("signature")
                            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
                            verify_key.verify(p.SerializeToString(), signature)
                            # Proof-of-work / guid check on the sender identity.
                            h = nacl.hash.sha512(p.signed_pubkey)
                            pow_hash = h[64:128]
                            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40]:
                                raise Exception('Invalid guid')
                            listener.notify(p.sender_guid, p.encryption_pubkey, p.subject,
                                            objects.Plaintext_Message.Type.Name(p.type), p.message)
                        except Exception:
                            # Undecryptable/forged messages are ignored but still
                            # deleted below so they don't accumulate.
                            pass
                        signature = self.signing_key.sign(value.valueKey)[:64]
                        self.kserver.delete(self.kserver.node.id, value.valueKey, signature)
                    except Exception:
                        pass
        self.kserver.get(self.kserver.node.id).addCallback(parse_messages)
@staticmethod
def cache(filename):
"""
Saves the file to a cache folder if it doesn't already exist.
"""
if not os.path.isfile(DATA_FOLDER + "cache/" + digest(filename).encode("hex")):
with open(DATA_FOLDER + "cache/" + digest(filename).encode("hex"), 'w') as outfile:
outfile.write(filename)
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "market/network.py",
"copies": "2",
"size": "20366",
"license": "mit",
"hash": 2315440727360444000,
"line_mean": 40.6482617587,
"line_max": 113,
"alpha_frac": 0.5586762251,
"autogenerated": false,
"ratio": 4.351709401709401,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0010780080744005754,
"num_lines": 489
} |
__author__ = 'chris'
import time
import json
import os.path
import nacl.signing
import nacl.hash
import nacl.encoding
import nacl.utils
import gnupg
import bitcoin
from dht.node import Node
from nacl.public import PrivateKey, PublicKey, Box
from twisted.internet import defer, reactor, task
from market.protocol import MarketProtocol
from dht.utils import digest
from constants import DATA_FOLDER
from protos import objects
from market.profile import Profile
from market.contracts import Contract
from collections import OrderedDict
from binascii import hexlify, unhexlify
from keyutils.keys import KeyChain
class Server(object):
    def __init__(self, kserver, signing_key, database):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.

        Args:
            kserver: the running Kademlia server, used for DHT get/set/delete.
            signing_key: this node's nacl signing key.
            database: database object exposing the data-store classes
                (FollowData etc.) used by this server and the protocol.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.db = database
        self.protocol = MarketProtocol(kserver.node.getProto(), self.router, signing_key, database)
    def get_contract(self, node_to_ask, contract_hash):
        """
        Will query the given node to fetch a contract given its hash.
        If the returned contract doesn't have the same hash, it will return None.
        After acquiring the contract it will download all the associated images if it
        does not already have them in cache.

        Args:
            node_to_ask: a `dht.node.Node` object containing an ip and port
            contract_hash: a 20 byte hash in raw byte format
        """
        def get_result(result):
            if digest(result[1][0]) == contract_hash:
                contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
                try:
                    # The vendor signs the canonical (indent=4) JSON of the
                    # listing; verify with the key from the signed pubkey blob.
                    signature = contract["vendor_offer"]["signature"]
                    pubkey = node_to_ask.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(json.dumps(contract["vendor_offer"]["listing"], indent=4),
                                      unhexlify(signature))
                    # Validate every listed moderator: proof-of-work guid check
                    # plus signatures over their encryption and bitcoin keys.
                    for moderator in contract["vendor_offer"]["listing"]["moderators"]:
                        guid = moderator["guid"]
                        guid_key = moderator["pubkeys"]["signing"]["key"]
                        guid_sig = moderator["pubkeys"]["signing"]["signature"]
                        enc_key = moderator["pubkeys"]["encryption"]["key"]
                        enc_sig = moderator["pubkeys"]["encryption"]["signature"]
                        bitcoin_key = moderator["pubkeys"]["bitcoin"]["key"]
                        bitcoin_sig = moderator["pubkeys"]["bitcoin"]["signature"]
                        h = nacl.hash.sha512(unhexlify(guid_sig) + unhexlify(guid_key))
                        pow_hash = h[64:128]
                        if int(pow_hash[:6], 16) >= 50 or guid != h[:40]:
                            raise Exception('Invalid GUID')
                        verify_key = nacl.signing.VerifyKey(guid_key, encoder=nacl.encoding.HexEncoder)
                        verify_key.verify(unhexlify(enc_key), unhexlify(enc_sig))
                        verify_key.verify(unhexlify(bitcoin_key), unhexlify(bitcoin_sig))
                        # should probably also validate the handle here.
                except Exception:
                    return None
                self.cache(result[1][0])
                # Kick off background downloads of any listing images.
                if "image_hashes" in contract["vendor_offer"]["listing"]["item"]:
                    for image_hash in contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                        self.get_image(node_to_ask, unhexlify(image_hash))
                return contract
            else:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContract(node_to_ask, contract_hash)
        return d.addCallback(get_result)
def get_image(self, node_to_ask, image_hash):
"""
Will query the given node to fetch an image given its hash.
If the returned image doesn't have the same hash, it will return None.
Args:
node_to_ask: a `dht.node.Node` object containing an ip and port
image_hash: a 20 byte hash in raw byte format
"""
def get_result(result):
if digest(result[1][0]) == image_hash:
self.cache(result[1][0])
return result[1][0]
else:
return None
if node_to_ask.ip is None:
return defer.succeed(None)
d = self.protocol.callGetImage(node_to_ask, image_hash)
return d.addCallback(get_result)
    def get_profile(self, node_to_ask):
        """
        Downloads the profile from the given node. If the images do not already
        exist in cache, it will download and cache them before returning the profile.
        """
        def get_result(result):
            try:
                # result[1] is (serialized profile, signature); verify before parsing.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                p = objects.Profile()
                p.ParseFromString(result[1][0])
                # NOTE(review): the guard reads `p.pgp_key.public_key` while the
                # import uses `p.pgp_key.publicKey` -- confirm both fields exist
                # in the proto; a bad attribute name here is swallowed by the
                # except below and the whole profile comes back as None.
                if p.pgp_key.public_key:
                    gpg = gnupg.GPG()
                    gpg.import_keys(p.pgp_key.publicKey)
                    if not gpg.verify(p.pgp_key.signature) or \
                                    node_to_ask.id.encode('hex') not in p.pgp_key.signature:
                        p.ClearField("pgp_key")
                # Fetch avatar/header images in the background if uncached.
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.avatar_hash)):
                    self.get_image(node_to_ask, p.avatar_hash)
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(p.header_hash)):
                    self.get_image(node_to_ask, p.header_hash)
                return p
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetProfile(node_to_ask)
        return d.addCallback(get_result)
    def get_user_metadata(self, node_to_ask):
        """
        Downloads just a small portion of the profile (containing the name, handle,
        and avatar hash). We need this for some parts of the UI where we list stores.
        Since we need fast loading we shouldn't download the full profile here.
        It will download the avatar if it isn't already in cache.
        """
        def get_result(result):
            try:
                # result[1] is (serialized metadata, signature); verify first.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                m = objects.Metadata()
                m.ParseFromString(result[1][0])
                if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(m.avatar_hash)):
                    self.get_image(node_to_ask, m.avatar_hash)
                return m
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetUserMetadata(node_to_ask)
        return d.addCallback(get_result)
def get_listings(self, node_to_ask):
"""
Queries a store for it's list of contracts. A `objects.Listings` protobuf
is returned containing some metadata for each contract. The individual contracts
should be fetched with a get_contract call.
"""
def get_result(result):
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(result[1][1] + result[1][0])
l = objects.Listings()
l.ParseFromString(result[1][0])
return l
except Exception:
return None
if node_to_ask.ip is None:
return defer.succeed(None)
d = self.protocol.callGetListings(node_to_ask)
return d.addCallback(get_result)
    def get_contract_metadata(self, node_to_ask, contract_hash):
        """
        Downloads just the metadata for the contract. Useful for displaying
        search results in a list view without downloading the entire contract.
        It will download the thumbnail image if it isn't already in cache.
        """
        def get_result(result):
            try:
                # result[1] is (serialized metadata, signature); verify first.
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1] + result[1][0])
                l = objects.Listings().ListingMetadata()
                l.ParseFromString(result[1][0])
                if l.HasField("thumbnail_hash"):
                    if not os.path.isfile(DATA_FOLDER + 'cache/' + hexlify(l.thumbnail_hash)):
                        self.get_image(node_to_ask, l.thumbnail_hash)
                return l
            except Exception:
                return None
        if node_to_ask.ip is None:
            return defer.succeed(None)
        d = self.protocol.callGetContractMetadata(node_to_ask, contract_hash)
        return d.addCallback(get_result)
    def make_moderator(self):
        """
        Set self as a moderator in the DHT.
        """
        u = objects.Profile()
        k = u.PublicKey()
        # Publish the bitcoin master public key, signed with our guid key,
        # in the profile so counterparties can verify it.
        k.public_key = bitcoin.bip32_deserialize(KeyChain(self.db).bitcoin_master_pubkey)[5]
        k.signature = self.signing_key.sign(k.public_key)[:64]
        u.bitcoin_key.MergeFrom(k)
        u.moderator = True
        Profile(self.db).update(u)
        proto = self.kserver.node.getProto().SerializeToString()
        self.kserver.set(digest("moderators"), digest(proto), proto)
    def unmake_moderator(self):
        """
        Deletes our moderator entry from the network.
        """
        key = digest(self.kserver.node.getProto().SerializeToString())
        signature = self.signing_key.sign(key)[:64]
        # NOTE(review): make_moderator stores under digest("moderators") but
        # this deletes under the raw "moderators" keyword -- confirm
        # kserver.delete hashes its keyword internally, otherwise the entry
        # is never removed.
        self.kserver.delete("moderators", key, signature)
        Profile(self.db).remove_field("moderator")
    def follow(self, node_to_follow):
        """
        Sends a follow message to another node in the network. The node must be online
        to receive the message. The message contains a signed, serialized `Follower`
        protobuf object which the recipient will store and can send to other nodes,
        proving you are following them. The response is a signed `Metadata` protobuf
        that will store in the db.
        """
        def save_to_db(result):
            # result[1] is expected to be (ack, serialized metadata, signature).
            if result[0] and result[1][0] == "True":
                try:
                    u = objects.Following.User()
                    u.guid = node_to_follow.id
                    u.signed_pubkey = node_to_follow.signed_pubkey
                    m = objects.Metadata()
                    m.ParseFromString(result[1][1])
                    u.metadata.MergeFrom(m)
                    u.signature = result[1][2]
                    # Verify the metadata signature before persisting it.
                    pubkey = node_to_follow.signed_pubkey[64:]
                    verify_key = nacl.signing.VerifyKey(pubkey)
                    verify_key.verify(result[1][1], result[1][2])
                    self.db.FollowData().follow(u)
                    return True
                except Exception:
                    return False
            else:
                return False
        # Attach our own profile metadata so the followee can display us.
        proto = Profile(self.db).get(False)
        m = objects.Metadata()
        m.name = proto.name
        m.handle = proto.handle
        m.avatar_hash = proto.avatar_hash
        m.nsfw = proto.nsfw
        f = objects.Followers.Follower()
        f.guid = self.kserver.node.id
        f.following = node_to_follow.id
        f.signed_pubkey = self.kserver.node.signed_pubkey
        f.metadata.MergeFrom(m)
        signature = self.signing_key.sign(f.SerializeToString())[:64]
        d = self.protocol.callFollow(node_to_follow, f.SerializeToString(), signature)
        return d.addCallback(save_to_db)
def unfollow(self, node_to_unfollow):
"""
Sends an unfollow message to a node and removes them from our db.
"""
def save_to_db(result):
if result[0] and result[1][0] == "True":
self.db.FollowData().unfollow(node_to_unfollow.id)
return True
else:
return False
signature = self.signing_key.sign("unfollow:" + node_to_unfollow.id)[:64]
d = self.protocol.callUnfollow(node_to_unfollow, signature)
return d.addCallback(save_to_db)
def get_followers(self, node_to_ask):
"""
Query the given node for a list if its followers. The response will be a
`Followers` protobuf object. We will verify the signature for each follower
to make sure that node really did follower this user.
"""
def get_response(response):
# Verify the signature on the response
f = objects.Followers()
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(response[1][1] + response[1][0])
f.ParseFromString(response[1][0])
except Exception:
return None
# Verify the signature and guid of each follower.
for follower in f.followers:
try:
v_key = nacl.signing.VerifyKey(follower.signed_pubkey[64:])
signature = follower.signature
follower.ClearField("signature")
v_key.verify(follower.SerializeToString(), signature)
h = nacl.hash.sha512(follower.signed_pubkey)
pow_hash = h[64:128]
if int(pow_hash[:6], 16) >= 50 or hexlify(follower.guid) != h[:40]:
raise Exception('Invalid GUID')
if follower.following != node_to_ask.id:
raise Exception('Invalid follower')
except Exception:
f.followers.remove(follower)
return f
d = self.protocol.callGetFollowers(node_to_ask)
return d.addCallback(get_response)
def get_following(self, node_to_ask):
"""
Query the given node for a list of users it's following. The return
is `Following` protobuf object that contains signed metadata for each
user this node is following. The signature on the metadata is there to
prevent this node from altering the name/handle/avatar associated with
the guid.
"""
def get_response(response):
# Verify the signature on the response
f = objects.Following()
try:
pubkey = node_to_ask.signed_pubkey[64:]
verify_key = nacl.signing.VerifyKey(pubkey)
verify_key.verify(response[1][1] + response[1][0])
f.ParseFromString(response[1][0])
except Exception:
return None
for user in f.users:
try:
v_key = nacl.signing.VerifyKey(user.signed_pubkey[64:])
signature = user.signature
v_key.verify(user.metadata.SerializeToString(), signature)
h = nacl.hash.sha512(user.signed_pubkey)
pow_hash = h[64:128]
if int(pow_hash[:6], 16) >= 50 or hexlify(user.guid) != h[:40]:
raise Exception('Invalid GUID')
except Exception:
f.users.remove(user)
return f
d = self.protocol.callGetFollowing(node_to_ask)
return d.addCallback(get_response)
    def send_notification(self, message):
        """
        Sends a notification message to all online followers. It will resolve
        each guid before sending the notification. Messages must be less than
        140 characters. Returns the number of followers the notification reached.
        """
        if len(message) > 140:
            return defer.succeed(0)
        def send(nodes):
            def how_many_reached(responses):
                count = 0
                for resp in responses:
                    if resp[1][0] and resp[1][1][0] == "True":
                        count += 1
                return count
            ds = []
            signature = self.signing_key.sign(str(message))[:64]
            for n in nodes:
                # n is a (success, node) pair from the resolve DeferredList;
                # skip guids that failed to resolve.
                if n[1] is not None:
                    ds.append(self.protocol.callNotify(n[1], message, signature))
            return defer.DeferredList(ds).addCallback(how_many_reached)
        dl = []
        f = objects.Followers()
        f.ParseFromString(self.db.FollowData().get_followers())
        for follower in f.followers:
            dl.append(self.kserver.resolve(follower.guid))
        return defer.DeferredList(dl).addCallback(send)
    def send_message(self, receiving_node, public_key, message_type, message, subject=None, store_only=False):
        """
        Sends a message to another node. If the node isn't online it
        will be placed in the dht for the node to pick up later.

        If `store_only` is True the message is written straight to the DHT
        without attempting direct delivery.
        """
        pro = Profile(self.db).get()
        # Messages over 1500 characters are silently dropped.
        if len(message) > 1500:
            return
        p = objects.Plaintext_Message()
        p.sender_guid = self.kserver.node.id
        p.signed_pubkey = self.kserver.node.signed_pubkey
        p.encryption_pubkey = PrivateKey(self.signing_key.encode()).public_key.encode()
        p.type = message_type
        p.message = message
        if subject is not None:
            p.subject = subject
        if pro.handle:
            p.handle = pro.handle
        if pro.avatar_hash:
            p.avatar_hash = pro.avatar_hash
        p.timestamp = int(time.time())
        signature = self.signing_key.sign(p.SerializeToString())[:64]
        p.signature = signature
        # Encrypt to the recipient with an ephemeral keypair; the ephemeral
        # public key travels alongside the ciphertext.
        skephem = PrivateKey.generate()
        pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
        box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
        nonce = nacl.utils.random(Box.NONCE_SIZE)
        ciphertext = box.encrypt(p.SerializeToString(), nonce)
        def get_response(response):
            # Direct delivery failed (or was skipped): park it in the DHT.
            if not response[0]:
                self.kserver.set(digest(receiving_node.id), pkephem, ciphertext)
        if not store_only:
            self.protocol.callMessage(receiving_node, pkephem, ciphertext).addCallback(get_response)
        else:
            get_response([False])
    def get_messages(self, listener):
        """
        Fetch our stored offline messages from the DHT, verify each one, hand
        order confirmations to the `Contract` machinery and everything else to
        `listener`, then delete every entry from the DHT.
        """
        # If the transport hasn't been initialized yet, wait a second and retry.
        if self.protocol.multiplexer is None or self.protocol.multiplexer.transport is None:
            return task.deferLater(reactor, 1, self.get_messages, listener)
        def parse_messages(messages):
            if messages is not None:
                for message in messages:
                    try:
                        value = objects.Value()
                        value.ParseFromString(message)
                        try:
                            # valueKey carries the sender's ephemeral public key;
                            # decrypt with our static private key.
                            box = Box(PrivateKey(self.signing_key.encode()), PublicKey(value.valueKey))
                            ciphertext = value.serializedData
                            plaintext = box.decrypt(ciphertext)
                            p = objects.Plaintext_Message()
                            p.ParseFromString(plaintext)
                            # The signature covers the message with its
                            # signature field cleared.
                            signature = p.signature
                            p.ClearField("signature")
                            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
                            verify_key.verify(p.SerializeToString(), signature)
                            # Proof-of-work / guid check on the sender identity.
                            h = nacl.hash.sha512(p.signed_pubkey)
                            pow_hash = h[64:128]
                            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40]:
                                raise Exception('Invalid guid')
                            # Order confirmations carry the order id in the
                            # subject and the confirmation JSON in the body.
                            if p.type == objects.Plaintext_Message.Type.Value("ORDER_CONFIRMATION"):
                                c = Contract(self.db, hash_value=unhexlify(p.subject))
                                c.accept_order_confirmation(self.protocol.multiplexer.ws,
                                                            confirmation_json=p.message)
                            else:
                                listener.notify(p.sender_guid, p.encryption_pubkey, p.subject,
                                                objects.Plaintext_Message.Type.Name(p.type), p.message)
                        except Exception:
                            # Undecryptable/forged messages are ignored but still
                            # deleted below so they don't accumulate.
                            pass
                        # NOTE(review): send_message stores under
                        # digest(node.id) but this gets/deletes under the raw
                        # node id -- confirm kserver hashes keys internally.
                        signature = self.signing_key.sign(value.valueKey)[:64]
                        self.kserver.delete(self.kserver.node.id, value.valueKey, signature)
                    except Exception:
                        pass
        self.kserver.get(self.kserver.node.id).addCallback(parse_messages)
    def purchase(self, node_to_ask, contract):
        """
        Send an order message to the vendor.

        Args:
            node_to_ask: a `dht.node.Node` object
            contract: a complete `Contract` object containing the buyer's order
        """
        def parse_response(response):
            try:
                # The vendor signs the payment address to acknowledge the order;
                # return the signature on success, False on any failure.
                address = contract.contract["buyer_order"]["order"]["payment"]["address"]
                verify_key = nacl.signing.VerifyKey(node_to_ask.signed_pubkey[64:])
                verify_key.verify(str(address), response[1][0])
                return response[1][0]
            except Exception:
                return False
        # Encrypt the order to the vendor's encryption key with an ephemeral keypair.
        public_key = contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["encryption"]
        skephem = PrivateKey.generate()
        pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
        box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
        nonce = nacl.utils.random(Box.NONCE_SIZE)
        ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
        d = self.protocol.callOrder(node_to_ask, pkephem, ciphertext)
        return d.addCallback(parse_response)
def confirm_order(self, guid, contract):
    """
    Send the order confirmation over to the buyer. If the buyer isn't
    online we will stick it in the DHT temporarily.

    Args:
        guid: hex-encoded guid of the buyer.
        contract: the `Contract` including "vendor_order_confirmation".
    """
    def get_node(node_to_ask):
        def parse_response(response):
            if response[0] and response[1][0] == "True":
                return True
            elif not response[0]:
                # Buyer offline: recompute the order id (digest of the
                # contract minus the confirmation) and park the confirmation
                # in the DHT for later pickup via get_messages().
                contract_dict = json.loads(json.dumps(contract.contract, indent=4),
                                           object_pairs_hook=OrderedDict)
                del contract_dict["vendor_order_confirmation"]
                order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
                self.send_message(Node(unhexlify(guid)),
                                  contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["encryption"],
                                  objects.Plaintext_Message.Type.Value("ORDER_CONFIRMATION"),
                                  json.dumps(contract.contract["vendor_order_confirmation"]),
                                  order_id,
                                  store_only=True)
                return True
            else:
                return False

        if node_to_ask:
            # Buyer resolved: encrypt the whole contract to the buyer's
            # encryption key with an ephemeral key pair and send directly.
            public_key = contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["encryption"]
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
            d = self.protocol.callOrderConfirmation(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])

    return self.kserver.resolve(unhexlify(guid)).addCallback(get_node)
@staticmethod
def cache(filename):
    """
    Saves the file to a cache folder if it doesn't already exist.

    Note: despite the name, `filename` is the raw file *content*; the cache
    entry is named after the hex digest of that content.
    """
    cache_path = DATA_FOLDER + "cache/" + digest(filename).encode("hex")
    if not os.path.isfile(cache_path):
        # BUG FIX: open in binary mode ('wb', as the other copy of this
        # method does) -- cached content is typically image bytes and text
        # mode corrupts it on platforms with newline translation.
        with open(cache_path, 'wb') as outfile:
            outfile.write(filename)
| {
"repo_name": "Joaz/OpenBazaar-Server",
"path": "market/network.py",
"copies": "3",
"size": "24799",
"license": "mit",
"hash": 1297824455299638000,
"line_mean": 43.4426523297,
"line_max": 111,
"alpha_frac": 0.5562724303,
"autogenerated": false,
"ratio": 4.34156162464986,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.639783405494986,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
import time
import json
import os.path
import nacl.signing
import nacl.hash
import nacl.encoding
import nacl.utils
import gnupg
import bitcoin
import httplib
from dht.node import Node
from nacl.public import PrivateKey, PublicKey, Box
from twisted.internet import defer, reactor, task
from market.protocol import MarketProtocol
from dht.utils import digest
from constants import DATA_FOLDER
from protos import objects
from market.profile import Profile
from market.contracts import Contract
from collections import OrderedDict
from binascii import unhexlify
from keyutils.keys import KeyChain
from keyutils.bip32utils import derive_childkey
from log import Logger
from seed import peers
class Server(object):
def __init__(self, kserver, signing_key, database):
    """
    A high level class for sending direct, market messages to other nodes.
    A node will need one of these to participate in buying and selling.
    Should be initialized after the Kademlia server.

    Args:
        kserver: the running Kademlia `Server`; supplies our node identity,
            routing table and DHT get/set/delete.
        signing_key: this node's nacl signing key, used to sign outgoing data.
        database: datastore handle (vendors, follow data, profile, contracts).
    """
    self.kserver = kserver
    self.signing_key = signing_key
    self.router = kserver.protocol.router
    self.db = database
    self.log = Logger(system=self)
    self.protocol = MarketProtocol(kserver.node, self.router, signing_key, database)
    # TODO: we need a loop here that republishes keywords when they are about to expire
    # TODO: we also need a loop here to delete expiring contract (if they are set to expire)
def querySeed(self, list_seed_pubkey):
"""
Query an HTTP seed for known vendors and save the vendors to the db.
Args:
Receives a list of one or more tuples Example [(seed, pubkey)]
seed: A `string` consisting of "ip:port" or "hostname:port"
pubkey: The hex encoded public key to verify the signature on the response
"""
for sp in list_seed_pubkey:
seed, pubkey = sp
try:
self.log.debug("querying %s for vendors" % seed)
c = httplib.HTTPConnection(seed)
c.request("GET", "/?type=vendors")
response = c.getresponse()
self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
data = response.read()
reread_data = data.decode("zlib")
proto = peers.PeerSeeds()
proto.ParseFromString(reread_data)
v = self.db.VendorStore()
for peer in proto.peer_data:
p = peers.PeerData()
p.ParseFromString(peer)
v.save_vendor(p.guid.encode("hex"), p.ip_address, p.port, p.signedPubkey)
verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
verify_key.verify("".join(proto.peer_data), proto.signature)
except Exception, e:
self.log.error("failed to query seed: %s" % str(e))
def get_contract(self, node_to_ask, contract_hash):
    """
    Will query the given node to fetch a contract given its hash.
    If the returned contract doesn't have the same hash, it will return None.
    After acquiring the contract it will download all the associated images if it
    does not already have them in cache.

    Args:
        node_to_ask: a `dht.node.Node` object containing an ip and port
        contract_hash: a 20 byte hash in raw byte format
    """
    def get_result(result):
        try:
            # Content-addressing check: the bytes must hash to the hash we asked for.
            if result[0] and digest(result[1][0]) == contract_hash:
                contract = json.loads(result[1][0], object_pairs_hook=OrderedDict)
                # TODO: verify the guid in the contract matches this node's guid
                signature = contract["vendor_offer"]["signature"]
                pubkey = node_to_ask.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(json.dumps(contract["vendor_offer"]["listing"], indent=4),
                                  unhexlify(signature))
                if "moderators" in contract["vendor_offer"]["listing"]:
                    # Validate each listed moderator: proof-of-work on the guid
                    # plus the guid key's signatures over the encryption and
                    # bitcoin keys.
                    for moderator in contract["vendor_offer"]["listing"]["moderators"]:
                        guid = moderator["guid"]
                        guid_key = moderator["pubkeys"]["signing"]["key"]
                        guid_sig = moderator["pubkeys"]["signing"]["signature"]
                        enc_key = moderator["pubkeys"]["encryption"]["key"]
                        enc_sig = moderator["pubkeys"]["encryption"]["signature"]
                        bitcoin_key = moderator["pubkeys"]["bitcoin"]["key"]
                        bitcoin_sig = moderator["pubkeys"]["bitcoin"]["signature"]
                        h = nacl.hash.sha512(unhexlify(guid_sig) + unhexlify(guid_key))
                        pow_hash = h[64:128]
                        if int(pow_hash[:6], 16) >= 50 or guid != h[:40]:
                            raise Exception('Invalid GUID')
                        verify_key = nacl.signing.VerifyKey(guid_key, encoder=nacl.encoding.HexEncoder)
                        verify_key.verify(unhexlify(enc_key), unhexlify(enc_sig))
                        verify_key.verify(unhexlify(bitcoin_key), unhexlify(bitcoin_sig))
                        # should probably also validate the handle here.
                self.cache(result[1][0])
                if "image_hashes" in contract["vendor_offer"]["listing"]["item"]:
                    # Prefetch listing images in the background (fire-and-forget).
                    for image_hash in contract["vendor_offer"]["listing"]["item"]["image_hashes"]:
                        self.get_image(node_to_ask, unhexlify(image_hash))
                return contract
            else:
                return None
        except Exception:
            # Any validation failure yields None rather than a partial contract.
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching contract %s from %s" % (contract_hash.encode("hex"), node_to_ask))
    d = self.protocol.callGetContract(node_to_ask, contract_hash)
    return d.addCallback(get_result)
def get_image(self, node_to_ask, image_hash):
    """
    Ask `node_to_ask` for the image whose digest is `image_hash`.

    Returns a deferred firing with the raw image bytes (after caching them),
    or None when the node is unreachable, the hash is malformed, or the
    returned data does not hash to `image_hash`.
    """
    if node_to_ask.ip is None or len(image_hash) != 20:
        return defer.succeed(None)

    def on_result(result):
        try:
            if not result[0]:
                return None
            data = result[1][0]
            # Content-addressing check before caching.
            if digest(data) != image_hash:
                return None
            self.cache(data)
            return data
        except Exception:
            return None

    self.log.info("fetching image %s from %s" % (image_hash.encode("hex"), node_to_ask))
    return self.protocol.callGetImage(node_to_ask, image_hash).addCallback(on_result)
def get_profile(self, node_to_ask):
    """
    Downloads the profile from the given node. If the images do not already
    exist in cache, it will download and cache them before returning the profile.

    Returns a deferred firing with the `objects.Profile`, or None on failure.
    """
    def get_result(result):
        try:
            # Response is (data, signature); verify against the node's key
            # before parsing.
            pubkey = node_to_ask.signed_pubkey[64:]
            verify_key = nacl.signing.VerifyKey(pubkey)
            verify_key.verify(result[1][1] + result[1][0])
            p = objects.Profile()
            p.ParseFromString(result[1][0])
            if p.pgp_key.public_key:
                gpg = gnupg.GPG()
                # BUG FIX: was `p.pgp_key.publicKey`, inconsistent with the
                # `p.pgp_key.public_key` test above; one spelling must be an
                # invalid protobuf field, raising AttributeError that the
                # except below silently turned into a failed profile fetch.
                # Normalized to `public_key` to match the guard -- confirm
                # against protos/objects.proto.
                gpg.import_keys(p.pgp_key.public_key)
                # Drop the pgp key if its signature doesn't verify or doesn't
                # mention this node's guid.
                if not gpg.verify(p.pgp_key.signature) or \
                                node_to_ask.id.encode('hex') not in p.pgp_key.signature:
                    p.ClearField("pgp_key")
            if not os.path.isfile(DATA_FOLDER + 'cache/' + p.avatar_hash.encode("hex")):
                self.get_image(node_to_ask, p.avatar_hash)
            if not os.path.isfile(DATA_FOLDER + 'cache/' + p.header_hash.encode("hex")):
                self.get_image(node_to_ask, p.header_hash)
            return p
        except Exception:
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching profile from %s" % node_to_ask)
    d = self.protocol.callGetProfile(node_to_ask)
    return d.addCallback(get_result)
def get_user_metadata(self, node_to_ask):
    """
    Downloads just a small portion of the profile (name, handle and avatar
    hash) for fast store listings in the UI; the full profile is not needed
    there. Fetches the avatar image too when it is not already cached.

    Returns a deferred firing with an `objects.Metadata`, or None on failure.
    """
    if node_to_ask.ip is None:
        return defer.succeed(None)

    def on_result(result):
        try:
            verifier = nacl.signing.VerifyKey(node_to_ask.signed_pubkey[64:])
            verifier.verify(result[1][1] + result[1][0])
            metadata = objects.Metadata()
            metadata.ParseFromString(result[1][0])
            avatar_path = DATA_FOLDER + 'cache/' + metadata.avatar_hash.encode("hex")
            if not os.path.isfile(avatar_path):
                self.get_image(node_to_ask, metadata.avatar_hash)
            return metadata
        except Exception:
            return None

    self.log.info("fetching user metadata from %s" % node_to_ask)
    return self.protocol.callGetUserMetadata(node_to_ask).addCallback(on_result)
def get_listings(self, node_to_ask):
    """
    Queries a store for its list of contracts. An `objects.Listings` protobuf
    is returned containing some metadata for each contract. The individual
    contracts should be fetched with a get_contract call.

    Returns a deferred firing with the `objects.Listings`, or None on failure.
    """
    if node_to_ask.ip is None:
        return defer.succeed(None)

    def on_result(result):
        try:
            verifier = nacl.signing.VerifyKey(node_to_ask.signed_pubkey[64:])
            verifier.verify(result[1][1] + result[1][0])
            listings = objects.Listings()
            listings.ParseFromString(result[1][0])
            return listings
        except Exception:
            return None

    self.log.info("fetching store listings from %s" % node_to_ask)
    return self.protocol.callGetListings(node_to_ask).addCallback(on_result)
def get_contract_metadata(self, node_to_ask, contract_hash):
    """
    Downloads just the metadata for the contract. Useful for displaying
    search results in a list view without downloading the entire contract.
    It will download the thumbnail image if it isn't already in cache.
    """
    def get_result(result):
        try:
            # Response is (data, signature); verify before parsing.
            pubkey = node_to_ask.signed_pubkey[64:]
            verify_key = nacl.signing.VerifyKey(pubkey)
            verify_key.verify(result[1][1] + result[1][0])
            l = objects.Listings().ListingMetadata()
            l.ParseFromString(result[1][0])
            if l.HasField("thumbnail_hash"):
                # Background-fetch the thumbnail when it isn't cached yet.
                if not os.path.isfile(DATA_FOLDER + 'cache/' + l.thumbnail_hash.encode("hex")):
                    self.get_image(node_to_ask, l.thumbnail_hash)
            return l
        except Exception:
            return None

    if node_to_ask.ip is None:
        return defer.succeed(None)
    self.log.info("fetching metadata for contract %s from %s" % (contract_hash.encode("hex"), node_to_ask))
    d = self.protocol.callGetContractMetadata(node_to_ask, contract_hash)
    return d.addCallback(get_result)
def make_moderator(self):
    """
    Set self as a moderator in the DHT.
    """
    u = objects.Profile()
    k = u.PublicKey()
    # Publish our bip32 bitcoin master public key, signed with our guid
    # signing key, so trading parties can derive escrow keys for us.
    k.public_key = bitcoin.bip32_deserialize(KeyChain(self.db).bitcoin_master_pubkey)[5]
    k.signature = self.signing_key.sign(k.public_key)[:64]
    u.bitcoin_key.MergeFrom(k)
    u.moderator = True
    Profile(self.db).update(u)
    # Announce ourselves under the well-known "moderators" keyword.
    proto = self.kserver.node.getProto().SerializeToString()
    self.kserver.set(digest("moderators"), digest(proto), proto)
    self.log.info("setting self as moderator on the network")
def unmake_moderator(self):
    """
    Deletes our moderator entry from the network and clears the local
    moderator flag from our profile.
    """
    serialized_node = self.kserver.node.getProto().SerializeToString()
    key = digest(serialized_node)
    # DHT deletions must be signed by the owner of the value.
    self.kserver.delete("moderators", key, self.signing_key.sign(key)[:64])
    Profile(self.db).remove_field("moderator")
    self.log.info("removing self as moderator from the network")
def follow(self, node_to_follow):
    """
    Sends a follow message to another node in the network. The node must be online
    to receive the message. The message contains a signed, serialized `Follower`
    protobuf object which the recipient will store and can send to other nodes,
    proving you are following them. The response is a signed `Metadata` protobuf
    that will store in the db.

    Returns a deferred firing with True on success, False otherwise.
    """
    def save_to_db(result):
        if result[0] and result[1][0] == "True":
            try:
                u = objects.Following.User()
                u.guid = node_to_follow.id
                u.signed_pubkey = node_to_follow.signed_pubkey
                m = objects.Metadata()
                m.ParseFromString(result[1][1])
                u.metadata.MergeFrom(m)
                u.signature = result[1][2]
                # Verify the followee's signature over its metadata before
                # persisting, so it can't later disown the name/avatar we show.
                pubkey = node_to_follow.signed_pubkey[64:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(result[1][1], result[1][2])
                self.db.FollowData().follow(u)
                return True
            except Exception:
                return False
        else:
            return False

    # Attach our own public metadata so the followee can display who we are.
    proto = Profile(self.db).get(False)
    m = objects.Metadata()
    m.name = proto.name
    m.handle = proto.handle
    m.avatar_hash = proto.avatar_hash
    m.nsfw = proto.nsfw
    f = objects.Followers.Follower()
    f.guid = self.kserver.node.id
    f.following = node_to_follow.id
    f.signed_pubkey = self.kserver.node.signed_pubkey
    f.metadata.MergeFrom(m)
    # Sign the whole Follower record so the recipient can prove we follow them.
    signature = self.signing_key.sign(f.SerializeToString())[:64]
    d = self.protocol.callFollow(node_to_follow, f.SerializeToString(), signature)
    self.log.info("sending follow request to %s" % node_to_follow)
    return d.addCallback(save_to_db)
def unfollow(self, node_to_unfollow):
    """
    Sends an unfollow message to a node and removes them from our db.

    Returns a deferred firing with True on success, False otherwise.
    """
    def on_response(result):
        try:
            if result[0] and result[1][0] == "True":
                self.db.FollowData().unfollow(node_to_unfollow.id)
                return True
            return False
        except Exception:
            return False

    # The signed "unfollow:<guid>" string authenticates the request.
    signature = self.signing_key.sign("unfollow:" + node_to_unfollow.id)[:64]
    self.log.info("sending unfollow request to %s" % node_to_unfollow)
    return self.protocol.callUnfollow(node_to_unfollow, signature).addCallback(on_response)
def get_followers(self, node_to_ask):
    """
    Query the given node for a list of its followers. The response will be a
    `Followers` protobuf object. We will verify the signature for each follower
    to make sure that node really did follow this user.

    Returns a deferred firing with the filtered `Followers`, or None if the
    response itself fails verification.
    """
    def get_response(response):
        # Verify the signature on the response
        f = objects.Followers()
        try:
            pubkey = node_to_ask.signed_pubkey[64:]
            verify_key = nacl.signing.VerifyKey(pubkey)
            verify_key.verify(response[1][1] + response[1][0])
            f.ParseFromString(response[1][0])
        except Exception:
            return None
        # Verify the signature and guid of each follower.
        # BUG FIX: the old code removed entries from `f.followers` while
        # iterating it, which skips the element after each removal and could
        # leave invalid followers in the result. Collect the bad entries
        # first and remove them afterwards.
        invalid = []
        for follower in f.followers:
            try:
                v_key = nacl.signing.VerifyKey(follower.signed_pubkey[64:])
                # The signature covers the record with its signature field cleared.
                signature = follower.signature
                follower.ClearField("signature")
                v_key.verify(follower.SerializeToString(), signature)
                # Proof-of-work/guid check on the follower's identity.
                h = nacl.hash.sha512(follower.signed_pubkey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or follower.guid.encode("hex") != h[:40]:
                    raise Exception('Invalid GUID')
                if follower.following != node_to_ask.id:
                    raise Exception('Invalid follower')
            except Exception:
                invalid.append(follower)
        for follower in invalid:
            f.followers.remove(follower)
        return f

    d = self.protocol.callGetFollowers(node_to_ask)
    self.log.info("fetching followers from %s" % node_to_ask)
    return d.addCallback(get_response)
def get_following(self, node_to_ask):
    """
    Query the given node for a list of users it's following. The return
    is a `Following` protobuf object that contains signed metadata for each
    user this node is following. The signature on the metadata is there to
    prevent this node from altering the name/handle/avatar associated with
    the guid.

    Returns a deferred firing with the filtered `Following`, or None if the
    response itself fails verification.
    """
    def get_response(response):
        # Verify the signature on the response
        f = objects.Following()
        try:
            pubkey = node_to_ask.signed_pubkey[64:]
            verify_key = nacl.signing.VerifyKey(pubkey)
            verify_key.verify(response[1][1] + response[1][0])
            f.ParseFromString(response[1][0])
        except Exception:
            return None
        # BUG FIX: the old code removed entries from `f.users` while
        # iterating it, which skips the element following each removal and
        # could leave invalid users in the result. Collect the bad entries
        # first and remove them afterwards.
        invalid = []
        for user in f.users:
            try:
                v_key = nacl.signing.VerifyKey(user.signed_pubkey[64:])
                signature = user.signature
                # The signature covers only the embedded metadata message.
                v_key.verify(user.metadata.SerializeToString(), signature)
                # Proof-of-work/guid check on the followed user's identity.
                h = nacl.hash.sha512(user.signed_pubkey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or user.guid.encode("hex") != h[:40]:
                    raise Exception('Invalid GUID')
            except Exception:
                invalid.append(user)
        for user in invalid:
            f.users.remove(user)
        return f

    d = self.protocol.callGetFollowing(node_to_ask)
    self.log.info("fetching following list from %s" % node_to_ask)
    return d.addCallback(get_response)
def broadcast(self, message):
    """
    Sends a broadcast message to all online followers. It will resolve
    each guid before sending the broadcast. Messages must be less than
    140 characters. Returns the number of followers the broadcast reached.
    """
    if len(message) > 140:
        return defer.succeed(0)

    def send(nodes):
        def how_many_reached(responses):
            # Each entry is (fired, (ok, data)); count affirmative replies.
            count = 0
            for resp in responses:
                if resp[1][0] and resp[1][1][0] == "True":
                    count += 1
            return count

        ds = []
        signature = self.signing_key.sign(str(message))[:64]
        for n in nodes:
            # n is (fired, resolved_node); None means the guid didn't resolve
            # (follower offline), so it is skipped.
            if n[1] is not None:
                ds.append(self.protocol.callBroadcast(n[1], message, signature))
        return defer.DeferredList(ds).addCallback(how_many_reached)

    # Resolve every follower guid in parallel, then fan the broadcast out.
    dl = []
    f = objects.Followers()
    f.ParseFromString(self.db.FollowData().get_followers())
    for follower in f.followers:
        dl.append(self.kserver.resolve(follower.guid))
    self.log.info("broadcasting %s to followers" % message)
    return defer.DeferredList(dl).addCallback(send)
def send_message(self, receiving_node, public_key, message_type, message, subject=None, store_only=False):
    """
    Sends a message to another node. If the node isn't online it
    will be placed in the dht for the node to pick up later.

    Args:
        receiving_node: destination `dht.node.Node`.
        public_key: hex-encoded curve25519 encryption key of the recipient.
        message_type: an `objects.Plaintext_Message.Type` value.
        message: the message payload (string).
        subject: optional subject (e.g. an order id for order messages).
        store_only: when True, skip the direct send and go straight to the DHT.
    """
    pro = Profile(self.db).get()
    p = objects.Plaintext_Message()
    p.sender_guid = self.kserver.node.id
    p.signed_pubkey = self.kserver.node.signed_pubkey
    p.encryption_pubkey = PrivateKey(self.signing_key.encode()).public_key.encode()
    p.type = message_type
    p.message = message
    if subject is not None:
        p.subject = subject
    if pro.handle:
        p.handle = pro.handle
    if pro.avatar_hash:
        p.avatar_hash = pro.avatar_hash
    p.timestamp = int(time.time())
    # Sign the serialized message (without the signature field) so the
    # recipient can authenticate the sender.
    signature = self.signing_key.sign(p.SerializeToString())[:64]
    p.signature = signature
    # Encrypt to the recipient with a fresh ephemeral key pair; the ephemeral
    # public key is sent alongside so the recipient can open the Box.
    skephem = PrivateKey.generate()
    pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
    box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
    nonce = nacl.utils.random(Box.NONCE_SIZE)
    ciphertext = box.encrypt(p.SerializeToString(), nonce)

    def get_response(response):
        if not response[0]:
            # Direct delivery failed (or store_only): re-encrypt a
            # zlib-compressed copy and park it in the DHT under the
            # recipient's guid for pickup by get_messages().
            ciphertext = box.encrypt(p.SerializeToString().encode("zlib"), nonce)
            self.kserver.set(digest(receiving_node.id), pkephem, ciphertext)

    self.log.info("sending encrypted message to %s" % receiving_node.id.encode("hex"))
    if not store_only:
        self.protocol.callMessage(receiving_node, pkephem, ciphertext).addCallback(get_response)
    else:
        get_response([False])
def get_messages(self, listener):
    """
    Fetch messages stored for us in the DHT while we were offline, apply
    order confirmations and receipts directly, hand everything else to
    `listener`, then delete the messages from the DHT.
    """
    # if the transport hasn't been initialized yet, wait a second
    if self.protocol.multiplexer is None or self.protocol.multiplexer.transport is None:
        return task.deferLater(reactor, 1, self.get_messages, listener)

    def parse_messages(messages):
        if messages is not None:
            self.log.info("retrieved %s message(s) from the dht" % len(messages))
            for message in messages:
                try:
                    value = objects.Value()
                    value.ParseFromString(message)
                    try:
                        # valueKey holds the sender's ephemeral public key;
                        # the payload was zlib-compressed before encryption
                        # (see send_message).
                        box = Box(PrivateKey(self.signing_key.encode()), PublicKey(value.valueKey))
                        ciphertext = value.serializedData
                        plaintext = box.decrypt(ciphertext).decode("zlib")
                        p = objects.Plaintext_Message()
                        p.ParseFromString(plaintext)
                        # Signature covers the message with its signature
                        # field cleared.
                        signature = p.signature
                        p.ClearField("signature")
                        verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
                        verify_key.verify(p.SerializeToString(), signature)
                        # Proof-of-work/guid check on the sender's identity.
                        h = nacl.hash.sha512(p.signed_pubkey)
                        pow_hash = h[64:128]
                        if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40]:
                            raise Exception('Invalid guid')
                        if p.type == objects.Plaintext_Message.Type.Value("ORDER_CONFIRMATION"):
                            c = Contract(self.db, hash_value=p.subject)
                            c.accept_order_confirmation(self.protocol.get_notification_listener(),
                                                        confirmation_json=p.message)
                        elif p.type == objects.Plaintext_Message.Type.Value("RECEIPT"):
                            c = Contract(self.db, hash_value=p.subject)
                            c.accept_receipt(self.protocol.get_notification_listener(),
                                             self.protocol.multiplexer.blockchain,
                                             receipt_json=p.message)
                        else:
                            listener.notify(p, signature)
                    except Exception:
                        # Undecryptable or forged messages are dropped silently.
                        pass
                    # Delete the value from the DHT (processed or not) so it
                    # isn't fetched again next time.
                    signature = self.signing_key.sign(value.valueKey)[:64]
                    self.kserver.delete(self.kserver.node.id, value.valueKey, signature)
                except Exception:
                    pass

    self.kserver.get(self.kserver.node.id).addCallback(parse_messages)
def purchase(self, node_to_ask, contract):
    """
    Send an order message to the vendor.

    Args:
        node_to_ask: a `dht.node.Node` object
        contract: a complete `Contract` object containing the buyer's order

    Returns a deferred firing with the vendor's signature over the order
    details on success, or False on failure.
    """
    def parse_response(response):
        try:
            # The vendor signs address + amount + listing hash + derived buyer
            # key; rebuild that exact string and verify against the vendor's
            # ed25519 verify key.
            address = contract.contract["buyer_order"]["order"]["payment"]["address"]
            chaincode = contract.contract["buyer_order"]["order"]["payment"]["chaincode"]
            masterkey_b = contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
            buyer_key = derive_childkey(masterkey_b, chaincode)
            amount = contract.contract["buyer_order"]["order"]["payment"]["amount"]
            listing_hash = contract.contract["buyer_order"]["order"]["ref_hash"]
            verify_key = nacl.signing.VerifyKey(node_to_ask.signed_pubkey[64:])
            verify_key.verify(
                str(address) + str(amount) + str(listing_hash) + str(buyer_key), response[1][0])
            return response[1][0]
        except Exception:
            return False

    public_key = contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["encryption"]
    # Encrypt the full order with a fresh ephemeral key so only the vendor
    # can read it.
    skephem = PrivateKey.generate()
    pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
    box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
    nonce = nacl.utils.random(Box.NONCE_SIZE)
    ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
    d = self.protocol.callOrder(node_to_ask, pkephem, ciphertext)
    self.log.info("purchasing contract %s from %s" % (contract.get_contract_id().encode("hex"), node_to_ask))
    return d.addCallback(parse_response)
def confirm_order(self, guid, contract):
    """
    Send the order confirmation over to the buyer. If the buyer isn't
    online we will stick it in the DHT temporarily.

    Args:
        guid: hex-encoded guid of the buyer.
        contract: the `Contract` including "vendor_order_confirmation".
    """
    def get_node(node_to_ask):
        def parse_response(response):
            if response[0] and response[1][0] == "True":
                return True
            elif not response[0]:
                # Buyer offline: recompute the order id (digest of the
                # contract minus the confirmation) and park the confirmation
                # in the DHT for pickup via get_messages().
                contract_dict = json.loads(json.dumps(contract.contract, indent=4),
                                           object_pairs_hook=OrderedDict)
                del contract_dict["vendor_order_confirmation"]
                order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
                self.send_message(Node(unhexlify(guid)),
                                  contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["encryption"],
                                  objects.Plaintext_Message.Type.Value("ORDER_CONFIRMATION"),
                                  json.dumps(contract.contract["vendor_order_confirmation"]),
                                  order_id,
                                  store_only=True)
                return True
            else:
                return False

        if node_to_ask:
            # Buyer resolved: encrypt the whole contract to the buyer's key
            # with an ephemeral key pair and send directly.
            public_key = contract.contract["buyer_order"]["order"]["id"]["pubkeys"]["encryption"]
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
            d = self.protocol.callOrderConfirmation(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])

    self.log.info("sending order confirmation to %s" % guid)
    return self.kserver.resolve(unhexlify(guid)).addCallback(get_node)
def complete_order(self, guid, contract):
    """
    Send the receipt, including the payout signatures and ratings, over to the vendor.
    If the vendor isn't online we will stick it in the DHT temporarily.

    Args:
        guid: hex-encoded guid of the vendor.
        contract: the `Contract` including "buyer_receipt".
    """
    def get_node(node_to_ask):
        def parse_response(response):
            if response[0] and response[1][0] == "True":
                return True
            elif not response[0]:
                # Vendor offline: recompute the order id (digest of the
                # contract minus confirmation and receipt) and park the
                # receipt in the DHT for pickup via get_messages().
                contract_dict = json.loads(json.dumps(contract.contract, indent=4),
                                           object_pairs_hook=OrderedDict)
                del contract_dict["vendor_order_confirmation"]
                del contract_dict["buyer_receipt"]
                order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
                self.send_message(Node(unhexlify(guid)),
                                  contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["encryption"],
                                  objects.Plaintext_Message.Type.Value("RECEIPT"),
                                  json.dumps(contract.contract["buyer_receipt"]),
                                  order_id,
                                  store_only=True)
                return True
            else:
                return False

        if node_to_ask:
            # Vendor resolved: encrypt the whole contract to the vendor's key
            # with an ephemeral key pair and send directly.
            public_key = contract.contract["vendor_offer"]["listing"]["id"]["pubkeys"]["encryption"]
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract.contract, indent=4), nonce)
            d = self.protocol.callCompleteOrder(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])

    self.log.info("sending order receipt to %s" % guid)
    return self.kserver.resolve(unhexlify(guid)).addCallback(get_node)
def open_dispute(self, order_id, claim):
    """
    Given an order ID we will pull the contract from disk and send it along with the claim
    to both the moderator and other party to the dispute. If either party isn't online we will
    stick it in the DHT for them.

    Returns False if the contract cannot be loaded or the contract's chosen
    moderator is missing from the listing.
    """
    try:
        # BUG FIX: the directory separator after "in progress" was missing
        # ("purchases/in progress<order_id>.json"), so a buyer-side dispute
        # could never load its contract and always fell through to the
        # sales path.
        file_path = DATA_FOLDER + "purchases/in progress/" + order_id + ".json"
        with open(file_path, 'r') as filename:
            contract = json.load(filename, object_pairs_hook=OrderedDict)
        guid = contract["vendor_offer"]["listing"]["id"]["guid"]
        enc_key = contract["vendor_offer"]["listing"]["id"]["pubkeys"]["encryption"]
    except Exception:
        try:
            file_path = DATA_FOLDER + "sales/in progress/" + order_id + ".json"
            with open(file_path, 'r') as filename:
                contract = json.load(filename, object_pairs_hook=OrderedDict)
            guid = contract["buyer_order"]["order"]["id"]["guid"]
            enc_key = contract["buyer_order"]["order"]["id"]["pubkeys"]["encryption"]
        except Exception:
            return False
    # Recompute the canonical order id from the contract without the
    # confirmation. NOTE: contract_dict aliases contract, so the deletion
    # also strips the confirmation from the contract that is sent out --
    # this mirrors the original behavior.
    contract_dict = contract
    if "vendor_order_confirmation" in contract_dict:
        del contract_dict["vendor_order_confirmation"]
    order_id = digest(json.dumps(contract_dict, indent=4)).encode("hex")
    contract["dispute_claim"] = claim
    mod_guid = contract["buyer_order"]["order"]["moderator"]
    mod_enc_key = None
    for mod in contract["vendor_offer"]["listing"]["moderators"]:
        if mod["guid"] == mod_guid:
            mod_enc_key = mod["pubkeys"]["encryption"]["key"]
    if mod_enc_key is None:
        # BUG FIX: previously mod_enc_key was left unbound when the chosen
        # moderator wasn't in the listing, raising NameError below. Treat it
        # as a failed dispute instead.
        return False

    def get_node(node_to_ask, recipient_guid, public_key):
        def parse_response(response):
            if not response[0]:
                # Recipient offline: leave the dispute in the DHT for pickup.
                self.send_message(Node(unhexlify(recipient_guid)),
                                  public_key,
                                  objects.Plaintext_Message.Type.Value("DISPUTE"),
                                  contract,
                                  order_id,
                                  store_only=True)

        if node_to_ask:
            skephem = PrivateKey.generate()
            pkephem = skephem.public_key.encode(nacl.encoding.RawEncoder)
            box = Box(skephem, PublicKey(public_key, nacl.encoding.HexEncoder))
            nonce = nacl.utils.random(Box.NONCE_SIZE)
            ciphertext = box.encrypt(json.dumps(contract, indent=4), nonce)
            d = self.protocol.callDisputeOpen(node_to_ask, pkephem, ciphertext)
            return d.addCallback(parse_response)
        else:
            return parse_response([False])

    # Notify both the counterparty and the moderator.
    self.kserver.resolve(unhexlify(guid)).addCallback(get_node, guid, enc_key)
    self.kserver.resolve(unhexlify(mod_guid)).addCallback(get_node, mod_guid, mod_enc_key)
@staticmethod
def cache(filename):
    """
    Write the given file content into the cache folder, keyed by the hex
    digest of that content, unless an identical entry already exists.

    Note: despite the name, `filename` is the raw file *content*.
    """
    path = DATA_FOLDER + "cache/" + digest(filename).encode("hex")
    if not os.path.isfile(path):
        with open(path, 'wb') as outfile:
            outfile.write(filename)
| {
"repo_name": "hauxir/OpenBazaar-Server",
"path": "market/network.py",
"copies": "1",
"size": "34484",
"license": "mit",
"hash": -6173321182968483000,
"line_mean": 46.1737346101,
"line_max": 114,
"alpha_frac": 0.5539960561,
"autogenerated": false,
"ratio": 4.336519114688128,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5390515170788128,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Chris'
import wx
from gooey.gui.util import wx_util
class CalendarDlg(wx.Dialog):
    """
    Simple modal dialog presenting a date picker and an Ok button.
    `GetPath()` returns the chosen date as a string after the dialog closes.
    """
    def __init__(self, parent):
        wx.Dialog.__init__(self, parent)
        self.SetBackgroundColour('#ffffff')
        self.ok_button = wx.Button(self, label='Ok')
        self.datepicker = wx.DatePickerCtrl(self, style=wx.DP_DROPDOWN)
        # Vertical layout: heading, picker, then a right-aligned Ok button.
        vertical_container = wx.BoxSizer(wx.VERTICAL)
        vertical_container.AddSpacer(10)
        vertical_container.Add(wx_util.h1(self, label='Select a Date'), 0, wx.LEFT | wx.RIGHT, 15)
        vertical_container.AddSpacer(10)
        vertical_container.Add(self.datepicker, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 15)
        vertical_container.AddSpacer(10)
        button_sizer = wx.BoxSizer(wx.HORIZONTAL)
        button_sizer.AddStretchSpacer(1)
        button_sizer.Add(self.ok_button, 0)
        vertical_container.Add(button_sizer, 0, wx.LEFT | wx.RIGHT, 15)
        vertical_container.AddSpacer(20)
        self.SetSizerAndFit(vertical_container)
        self.Bind(wx.EVT_BUTTON, self.OnOkButton, self.ok_button)

    def OnOkButton(self, event):
        # Close the dialog; the selected date remains readable via GetPath().
        self.Close()
        return wx.ID_OK

    def OnCancellButton(self, event):
        # (Name kept, including the typo, in case callers bind it externally.)
        # BUG FIX: the old body was `try: return None / except: self.Close()`;
        # `return None` can never raise, so the except clause was unreachable
        # and cancelling never closed the dialog. Close it like OnOkButton.
        self.Close()

    def GetPath(self):
        # The DatePickerCtrl value stringifies as "<date> <time>"; keep only
        # the date portion.
        return str(self.datepicker.GetValue()).split(' ')[0]
| {
"repo_name": "lrq3000/pyFileFixity",
"path": "pyFileFixity/lib/gooey/gui/widgets/calender_dialog.py",
"copies": "1",
"size": "1246",
"license": "mit",
"hash": -2305105028090844200,
"line_mean": 26.6888888889,
"line_max": 94,
"alpha_frac": 0.6869983949,
"autogenerated": false,
"ratio": 3.138539042821159,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4325537437721159,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
"""
Just using this class for testing the DHT for now.
We will fit the actual implementation in where appropriate.
"""
import pickle
import stun
from twisted.internet import reactor
from twisted.python import log
from os.path import expanduser
from bitcoin import *
from txjsonrpc.netstring import jsonrpc
from guidutils.guid import GUID
from dht.utils import digest
from dht.network import Server
from dht.node import Node
from wireprotocol import OpenBazaarProtocol
from binascii import unhexlify
# Log twisted events to stdout while testing.
log.startLogging(sys.stdout)
datafolder = expanduser("~") + "/OpenBazaar/"
if not os.path.exists(datafolder):
    os.makedirs(datafolder)

def get_data_folder():
    # Accessor for the data directory used elsewhere in the process.
    return datafolder

# Discover our public ip/port via STUN (needed when running behind a NAT).
response = stun.get_ip_info(stun_host="stun.l.google.com", source_port=0, stun_port=19302)
ip_address = response[1]
port = response[2]
# key generation for testing
if os.path.isfile(datafolder + 'keys.pickle'):
    keys = pickle.load(open(datafolder + "keys.pickle", "r"))
    g = keys["guid"]
else:
    print "Generating GUID, stand by..."
    g = GUID()
    keys = {'guid': g}
    pickle.dump(keys, open(datafolder + "keys.pickle", "wb"))
protocol = OpenBazaarProtocol((ip_address, port))
# kademlia
node = Node(g.guid, signed_pubkey=g.signed_pubkey)
# NOTE(review): indentation was lost in this copy of the file; the grouping
# below (what belongs inside the else branch) is reconstructed -- confirm
# whether connect_multiplexer/bootstrap should run only on a fresh start.
if os.path.isfile(datafolder + 'cache.pickle'):
    kserver = Server.loadState(datafolder + 'cache.pickle', ip_address, port, protocol)
else :
    kserver = Server(node)
kserver.protocol.connect_multiplexer(protocol)
kserver.bootstrap(kserver.querySeed("162.213.253.147:8080", "909b4f614ec4fc8c63aab83b91bc620d7a238600bf256472e968fdafce200128"))
# Persist routing/storage state every 10 seconds so restarts can resume.
kserver.saveStateRegularly(datafolder + 'cache.pickle', 10)
protocol.register_processor(kserver.protocol)
# Main UDP listener for all OpenBazaar wire traffic.
reactor.listenUDP(18467, protocol)
# RPC-Server
class RPCCalls(jsonrpc.JSONRPC):
    """
    Local JSON-RPC (netstring transport) interface for poking the DHT from
    the command line while testing. Relies on the module-level `kserver`,
    `node` and `g` set up above.
    """
    def jsonrpc_getpubkey(self):
        # Hex-encoded signed public key of this node's GUID.
        return hexlify(g.signed_pubkey)

    def jsonrpc_getinfo(self):
        """Summarize version, known peer count and storage footprint."""
        info = {"version": "0.1"}
        num_peers = 0
        for bucket in kserver.protocol.router.buckets:
            num_peers += bucket.__len__()
        info["known peers"] = num_peers
        info["stored messages"] = len(kserver.storage.data)
        # Rough in-memory size: container plus all keys and values.
        size = sys.getsizeof(kserver.storage.data)
        size += sum(map(sys.getsizeof, kserver.storage.data.itervalues())) + sum(
            map(sys.getsizeof, kserver.storage.data.iterkeys()))
        info["db size"] = size
        return info

    def jsonrpc_set(self, keyword, key):
        """Store this node's serialized proto under `keyword`, keyed by digest(key)."""
        def handle_result(result):
            print "JSONRPC result:", result
        d = kserver.set(str(keyword), digest(key), node.getProto().SerializeToString())
        d.addCallback(handle_result)
        return "Sending store request..."

    def jsonrpc_get(self, keyword):
        """Fetch the values stored under `keyword`; result goes to the log."""
        def handle_result(result):
            print "JSONRPC result:", result
        d = kserver.get(keyword)
        d.addCallback(handle_result)
        return "Sent get request. Check log output for result"

    def jsonrpc_delete(self, keyword, key):
        """Delete our value under `keyword`; deletions must be signed by the owner."""
        def handle_result(result):
            print "JSONRPC result:", result
        signature = g.signing_key.sign(digest(key))
        d = kserver.delete(str(keyword), digest(key), signature[:64])
        d.addCallback(handle_result)
        return "Sending delete request..."

    def jsonrpc_shutdown(self):
        """Shut down every active rudp connection."""
        for addr in kserver.protocol:
            connection = kserver.protocol._active_connections.get(addr)
            if connection is not None:
                connection.shutdown()
        return "Closing all connections."

    def jsonrpc_getpeers(self):
        """List the hex guids of every node currently in the routing table."""
        peers = []
        for bucket in kserver.protocol.router.buckets:
            for node in bucket.getNodes():
                peers.append(node.id.encode("hex"))
        return peers

    def jsonrpc_getnode(self, guid):
        # NOTE(review): returns a Node object, which may not serialize over
        # JSON-RPC -- confirm intended use.
        n = kserver.get_node(unhexlify(guid))
        return n
# Expose the RPC interface on localhost only, then hand control to the reactor.
factory = jsonrpc.RPCFactory(RPCCalls)
factory.addIntrospection()
reactor.listenTCP(18465, factory, interface="127.0.0.1")
reactor.run()
"repo_name": "jorik041/Network",
"path": "openbazaard.py",
"copies": "1",
"size": "4004",
"license": "mit",
"hash": 3096695091277227000,
"line_mean": 31.2983870968,
"line_max": 132,
"alpha_frac": 0.6760739261,
"autogenerated": false,
"ratio": 3.584601611459266,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4760675537559266,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
"""
Just using this class for testing the DHT for now.
We will fit the actual implementation in where appropriate.
"""
import stun
import os
import sys
import dht.constants
from db.datastore import create_database
from twisted.internet import reactor
from twisted.python import log, logfile
from twisted.web.server import Site
from twisted.web.static import File
from keyutils.keys import KeyChain
from dht.network import Server
from dht.node import Node
from wireprotocol import OpenBazaarProtocol
from constants import DATA_FOLDER, DATABASE
from txjsonrpc.netstring import jsonrpc
from networkcli import RPCCalls
from market import network
from market.listeners import MessageListenerImpl, NotificationListenerImpl
from ws import WSFactory, WSProtocol
from autobahn.twisted.websocket import listenWS
from restapi import OpenBazaarAPI
from dht.storage import PersistentStorage
# logging: mirror twisted's log to debug.log and stdout.
logFile = logfile.LogFile.fromFullPath(DATA_FOLDER + "debug.log")
log.addObserver(log.FileLogObserver(logFile).emit)
log.startLogging(sys.stdout)

# stun: discover NAT type and our public (ip, port).
print "Finding NAT Type.."
response = stun.get_ip_info(stun_host='seed.openbazaar.org', stun_port=3478, source_port=0)
print "%s on %s:%s" % (response[0], response[1], response[2])
ip_address = response[1]
port = response[2]

# database: create the schema on first run.
if not os.path.isfile(DATABASE):
    create_database()

# key generation / loading
keys = KeyChain()

def on_bootstrap_complete(resp):
    # Once the DHT is bootstrapped, wire message/notification listeners into
    # the market server. Uses the globals `ws_factory` and `mserver`, which
    # are assigned further down before the reactor (and thus this callback)
    # ever runs.
    mlistener = MessageListenerImpl(ws_factory)
    mserver.get_messages(mlistener)
    mserver.protocol.add_listener(mlistener)
    nlistener = NotificationListenerImpl(ws_factory)
    mserver.protocol.add_listener(nlistener)

protocol = OpenBazaarProtocol((ip_address, port))

# kademlia: resume from the cached routing state when available, otherwise
# bootstrap off the hard-coded seed node.
node = Node(keys.guid, ip_address, port, signed_pubkey=keys.guid_signed_pubkey)
if os.path.isfile(DATA_FOLDER + 'cache.pickle'):
    kserver = Server.loadState(DATA_FOLDER + 'cache.pickle', ip_address, port, protocol,
                               on_bootstrap_complete, storage=PersistentStorage(DATABASE))
else:
    kserver = Server(node, dht.constants.KSIZE, dht.constants.ALPHA, storage=PersistentStorage(DATABASE))
    kserver.protocol.connect_multiplexer(protocol)
    kserver.bootstrap(
        kserver.querySeed("162.213.253.147:8080",
                          "5b56c8daeb3b37c8a9b47be6102fa43b9f069f58dcb57475984041b26c99e389"))\
        .addCallback(on_bootstrap_complete)
kserver.saveStateRegularly(DATA_FOLDER + 'cache.pickle', 10)
protocol.register_processor(kserver.protocol)

# market: application-level (buyer/vendor) protocol on the same multiplexer.
mserver = network.Server(kserver, keys.signing_key)
mserver.protocol.connect_multiplexer(protocol)
protocol.register_processor(mserver.protocol)
reactor.listenUDP(port, protocol)

# json-rpc server (localhost only)
factory = jsonrpc.RPCFactory(RPCCalls(kserver, mserver, keys))
reactor.listenTCP(18465, factory, interface="127.0.0.1")

# web sockets for the UI
ws_factory = WSFactory("ws://127.0.0.1:18466", mserver, kserver)
ws_factory.protocol = WSProtocol
ws_factory.setProtocolOptions(allowHixie76=True)
listenWS(ws_factory)
webdir = File(".")
web = Site(webdir)
reactor.listenTCP(9000, web, interface="127.0.0.1")

# rest api
api = OpenBazaarAPI(mserver, kserver)
site = Site(api, timeout=None)
reactor.listenTCP(18469, site, interface="127.0.0.1")
reactor.run()
| {
"repo_name": "the9ull/OpenBazaar-Server",
"path": "openbazaard.py",
"copies": "1",
"size": "3263",
"license": "mit",
"hash": 3674450258859289000,
"line_mean": 31.63,
"line_max": 105,
"alpha_frac": 0.7670855041,
"autogenerated": false,
"ratio": 3.272818455366098,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45399039594660984,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
class DataException(Exception):
    """Raised when a data field cannot be formatted or resolved."""
class DataField(object):
    """Descriptor for one named (optionally indexed) field in a result dict,
    with helpers to extract the value and pretty-print it."""

    # e.g. "temp[0]: 21.5 C"
    DEFAULT_FORMAT = "{name}{index}: {value}{unit}"

    def __init__(self, name, index=None, friendly_name=None, unit=None, default_value=None):
        self.name = name                    # key in the data dict
        self.index = index                  # optional position when the value is a sequence
        self.friendly_name = friendly_name  # display-name override
        self.unit = unit                    # display unit suffix
        self.default_value = default_value  # fallback value for to_str()

    def get_value(self, data_dict):
        """Return this field's value from data_dict (indexed when configured)."""
        if self.index is not None:
            return data_dict[self.name][self.index]
        else:
            return data_dict[self.name]

    def in_dict(self, result_dict):
        """Return True if result_dict holds this field such that get_value()
        is safe: the key exists and, when an index is configured, the value is
        a sequence long enough."""
        if self.name not in result_dict:
            return False
        if self.index is not None:
            # isinstance (rather than an exact type() comparison) also accepts
            # tuple/list subclasses; the length check guards get_value().
            value = result_dict[self.name]
            return isinstance(value, (tuple, list)) and len(value) > self.index
        else:
            return True

    def to_str(self, value=None, str_format=None):
        """Format value (or default_value) using str_format / DEFAULT_FORMAT.

        Raises:
            DataException: if neither value nor default_value is available.
        """
        if value is None:
            if self.default_value is None:
                raise DataException('No value or default_value to print')
            value = self.default_value
        name = self.friendly_name if self.friendly_name else self.name
        # the numeric index is only shown when no friendly name overrides it
        index = "[{}]".format(self.index) if self.index is not None and not self.friendly_name else ''
        unit = " {}".format(self.unit) if self.unit is not None else ''
        return (str_format or self.DEFAULT_FORMAT).format(
            name=name, index=index, value=value, unit=unit
        )
class DataLimit(object):
    """Open-interval bound check: minimum < value < maximum, each side optional."""

    def __init__(self, minimum=None, maximum=None):
        self.minimum = minimum
        self.maximum = maximum

    def test(self, value):
        """Return True when value lies strictly inside the configured bounds.

        A side whose bound is None is treated as unbounded.
        """
        below_max = self.maximum is None or value < self.maximum
        above_min = self.minimum is None or value > self.minimum
        return below_max and above_min
def generate_data_field_list(field_dict_list):
    """Build a DataField per keyword dict in field_dict_list (None passes through)."""
    if field_dict_list is None:
        return None
    fields = []
    for kwargs in field_dict_list:
        fields.append(DataField(**kwargs))
    return fields
| {
"repo_name": "ravngr/jtfadump2",
"path": "data.py",
"copies": "1",
"size": "1999",
"license": "mit",
"hash": -4623626147815155000,
"line_mean": 27.5571428571,
"line_max": 109,
"alpha_frac": 0.5987993997,
"autogenerated": false,
"ratio": 3.866537717601547,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4965337117301547,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Chris'
"""
Preps the extracted Python code so that it can be evaled by the
monkey_parser
"""
from itertools import *
source = '''
import sys
import os
import doctest
import cProfile
import pstats
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from gooey import Gooey
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('filename', help='filename')
parser.add_argument('-r', '--recursive', dest='recurse', action='store_true', help='recurse into subfolders [default: %(default)s]')
parser.add_argument('-v', '--verbose', dest='verbose', action='count', help='set verbosity level [default: %(default)s]')
parser.add_argument('-i', '--include', action='append', help='only include paths matching this regex pattern. Note: exclude is given preference over include. [default: %(default)s]', metavar='RE')
parser.add_argument('-m', '--mycoolargument', help='mycoolargument')
parser.add_argument('-e', '--exclude', dest='exclude', help='exclude paths matching this regex pattern. [default: %(default)s]', metavar='RE')
parser.add_argument('-V', '--version', action='version')
parser.add_argument('-T', '--tester', choices=['yes', 'no'])
parser.add_argument(dest='paths', help='paths to folder(s) with source file(s) [default: %(default)s]', metavar='path', nargs='+')
'''
def take_imports(code):
    """Lazily yield leading lines containing 'import', stopping at the first that doesn't."""
    for line in code:
        if 'import' not in line:
            break
        yield line
def drop_imports(code):
    """Lazily skip the leading run of 'import' lines and yield everything after."""
    it = iter(code)
    for line in it:
        if 'import' not in line:
            yield line
            break
    for line in it:
        yield line
def split_line(line):
    """Split an assignment statement into its variable and command strings.

    in:  "parser = ArgumentParser(description='Example Argparse Program')"
    out: ("parser", "ArgumentParser(description='Example Argparse Program')")
    """
    parts = line.split('=', 1)
    variable, instruction = parts
    return variable.strip(), instruction.strip()
def update_parser_varname(new_varname, code):
    """Rename the ArgumentParser variable throughout *code*.

    The first non-empty, non-import line is assumed to be the
    ``parser = ArgumentParser(...)`` assignment; its left-hand variable name
    is replaced by *new_varname* in every line.

    Args:
        new_varname: replacement variable name.
        code: iterable of source-code lines.

    Returns:
        list of lines with the old parser variable renamed.
    """
    # Materialize once: a lazy filter() would be exhausted by the scan below
    # and leave nothing for the rewrite (broken on Python 3).
    lines = [line for line in code if line != '']
    argparse_code = dropwhile(lambda line: 'import' in line, lines)
    # next() builtin instead of the Python-2-only .next() method.
    variable, instruction = next(argparse_code).split('=', 1)
    old_argparser_varname = variable.strip()
    return [line.replace(old_argparser_varname, new_varname)
            for line in lines]
if __name__ == '__main__':
pass
| {
"repo_name": "lrq3000/pyFileFixity",
"path": "pyFileFixity/lib/gooey/python_bindings/code_prep.py",
"copies": "1",
"size": "2310",
"license": "mit",
"hash": -268749260685091400,
"line_mean": 36.868852459,
"line_max": 196,
"alpha_frac": 0.7125541126,
"autogenerated": false,
"ratio": 3.7560975609756095,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.496865167357561,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
# pylint: disable=import-error
#import guidc
from binascii import hexlify, unhexlify
import nacl.signing
import nacl.hash
import nacl.encoding
def _testpow(pow_hash):
return True if int(pow_hash, 16) < 50 else False
class GUID(object):
    """
    Class for generating the guid. It can be generated using C code for a modest
    speed boost but it is currently disabled to make it easier to compile the app.
    """
    # pylint: disable=W0633
    def __init__(self, keys=None, use_C_lib=False):
        # keys: optional (signing_key, verify_key, guid) tuple to restore a
        # previously generated identity instead of mining a fresh one.
        if keys is None:
            if use_C_lib:  # disabled for now
                # self.privkey = unhexlify(guidc.generate())
                self.privkey = None
                self.signing_key = nacl.signing.SigningKey(self.privkey)
                self.verify_key = verify_key = self.signing_key.verify_key
                h = nacl.hash.sha512(verify_key.encode())
                # guid = first 20 bytes (40 hex chars) of sha512(pubkey)
                self.guid = unhexlify(h[:40])
            else:
                self.generate()
        else:
            self.signing_key, self.verify_key, self.guid = keys

    def generate(self):
        """Mine Ed25519 keypairs until the pubkey hash passes the PoW check."""
        valid_pow = False
        while not valid_pow:
            signing_key = nacl.signing.SigningKey.generate()
            verify_key = signing_key.verify_key
            h = nacl.hash.sha512(verify_key.encode())
            # PoW is judged on the digest beyond the 40-char guid prefix.
            pow_hash = h[40:]
            valid_pow = _testpow(pow_hash[:6])
        self.signing_key = signing_key
        self.verify_key = verify_key
        self.guid = unhexlify(h[:40])

    @classmethod
    def from_privkey(cls, privkey):
        """Rebuild a GUID from a hex-encoded private key.

        Returns None implicitly when the derived hash fails the PoW check.
        """
        signing_key = nacl.signing.SigningKey(privkey, encoder=nacl.encoding.HexEncoder)
        verify_key = signing_key.verify_key
        h = nacl.hash.sha512(verify_key.encode())
        pow_hash = h[40:]
        if _testpow(pow_hash[:6]):
            return GUID((signing_key, verify_key, unhexlify(h[:40])))

    def __str__(self):
        # Hex dump of the full identity, one component per line.
        return "privkey: %s\npubkey: %s\nguid: %s" % (
            self.signing_key.encode(encoder=nacl.encoding.HexEncoder),
            self.verify_key.encode(encoder=nacl.encoding.HexEncoder),
            hexlify(self.guid))
| {
"repo_name": "tomgalloway/OpenBazaar-Server",
"path": "keys/guid.py",
"copies": "6",
"size": "2128",
"license": "mit",
"hash": -9212765513834398000,
"line_mean": 34.4666666667,
"line_max": 88,
"alpha_frac": 0.5968045113,
"autogenerated": false,
"ratio": 3.5585284280936453,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7155332939393645,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
# pylint: disable=import-error
#import guidc
from binascii import hexlify, unhexlify
import nacl.signing
import nacl.hash
def _testpow(pow_hash):
return True if int(pow_hash, 16) < 50 else False
class GUID(object):
    """
    Class for generating the guid. It can be generated using C code for a modest
    speed boost but it is currently disabled to make it easier to compile the app.
    """
    # pylint: disable=W0633
    def __init__(self, keys=None, use_C_lib=False):
        # keys: optional (signing_key, guid, signed_pubkey, privkey) tuple to
        # restore a previously generated identity instead of mining a new one.
        if keys is None:
            if use_C_lib:  # disabled for now
                # self.privkey = unhexlify(guidc.generate())
                self.privkey = None
                self.signing_key = nacl.signing.SigningKey(self.privkey)
                verify_key = self.signing_key.verify_key
                # identity binds a self-signature over the public key
                signed = self.signing_key.sign(str(verify_key))
                h = nacl.hash.sha512(signed)
                self.signed_pubkey = signed
                self.guid = unhexlify(h[:40])
            else:
                self.privkey = self.generate()
        else:
            self.signing_key, self.guid, self.signed_pubkey, self.privkey = keys

    def generate(self):
        """Mine keypairs until the signed-pubkey hash passes the PoW check;
        returns the raw private key bytes."""
        valid_pow = False
        while not valid_pow:
            signing_key = nacl.signing.SigningKey.generate()
            verify_key = signing_key.verify_key
            signed = signing_key.sign(str(verify_key))
            h = nacl.hash.sha512(signed)
            # PoW judged on hex chars 64-127 of the digest
            pow_hash = h[64:128]
            valid_pow = _testpow(pow_hash[:6])
        self.signing_key = signing_key
        self.guid = unhexlify(h[:40])
        self.signed_pubkey = signed
        return signing_key.encode()

    @classmethod
    def from_privkey(cls, privkey):
        """Rebuild a GUID from raw private key bytes.

        Returns None implicitly when the derived hash fails the PoW check.
        """
        signing_key = nacl.signing.SigningKey(privkey)
        verify_key = signing_key.verify_key
        signed = signing_key.sign(str(verify_key))
        h = nacl.hash.sha512(signed)
        pow_hash = h[64:128]
        if _testpow(pow_hash[:6]):
            return GUID((signing_key, unhexlify(h[:40]), signed, privkey))

    def __str__(self):
        # Hex dump of the full identity, one component per line.
        return "privkey: %s\nsigned pubkey: %s\nguid: %s" % (
            hexlify(self.privkey), hexlify(self.signed_pubkey), hexlify(self.guid))
| {
"repo_name": "Joaz/OpenBazaar-Server",
"path": "keyutils/guid.py",
"copies": "4",
"size": "2226",
"license": "mit",
"hash": 3231197225558858000,
"line_mean": 34.9032258065,
"line_max": 83,
"alpha_frac": 0.5893980234,
"autogenerated": false,
"ratio": 3.5787781350482315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003415154045093156,
"num_lines": 62
} |
__author__ = 'chris'
# pylint: disable=import-error
import guidc
from binascii import hexlify, unhexlify
import nacl.signing
import nacl.hash
def _testpow(pow_hash):
return True if int(pow_hash, 16) < 50 else False
class GUID(object):
    """Node identity: guid mined with a small proof-of-work, optionally using
    the C extension (guidc) to speed up key generation."""
    # pylint: disable=W0633
    def __init__(self, keys=None, use_C_lib=False):
        # keys: optional (signing_key, guid, signed_pubkey, privkey) tuple to
        # restore a previously generated identity instead of mining a new one.
        if keys is None:
            if use_C_lib:
                # C extension mines the key; remaining derivation stays in Python.
                self.privkey = unhexlify(guidc.generate())
                self.signing_key = nacl.signing.SigningKey(self.privkey)
                verify_key = self.signing_key.verify_key
                signed = self.signing_key.sign(str(verify_key))
                h = nacl.hash.sha512(signed)
                self.signed_pubkey = signed
                self.guid = unhexlify(h[:40])
            else:
                self.privkey = self.generate()
        else:
            self.signing_key, self.guid, self.signed_pubkey, self.privkey = keys

    def generate(self):
        """Mine keypairs until the signed-pubkey hash passes the PoW check;
        returns the raw private key bytes."""
        valid_pow = False
        while not valid_pow:
            signing_key = nacl.signing.SigningKey.generate()
            verify_key = signing_key.verify_key
            signed = signing_key.sign(str(verify_key))
            h = nacl.hash.sha512(signed)
            # PoW judged on hex chars 64-127 of the digest
            pow_hash = h[64:128]
            valid_pow = _testpow(pow_hash[:6])
        self.signing_key = signing_key
        self.guid = unhexlify(h[:40])
        self.signed_pubkey = signed
        return signing_key.encode()

    @classmethod
    def from_privkey(cls, privkey):
        """Rebuild a GUID from raw private key bytes.

        Returns None implicitly when the derived hash fails the PoW check.
        """
        signing_key = nacl.signing.SigningKey(privkey)
        verify_key = signing_key.verify_key
        signed = signing_key.sign(str(verify_key))
        h = nacl.hash.sha512(signed)
        pow_hash = h[64:128]
        if _testpow(pow_hash[:6]):
            return GUID((signing_key, unhexlify(h[:40]), signed, privkey))

    def __str__(self):
        # Hex dump of the full identity, one component per line.
        return "privkey: %s\nsigned pubkey: %s\nguid: %s" % (
            hexlify(self.privkey), hexlify(self.signed_pubkey), hexlify(self.guid))
| {
"repo_name": "hoffmabc/OpenBazaar-Server",
"path": "keyutils/guid.py",
"copies": "4",
"size": "1986",
"license": "mit",
"hash": -459232462140063740,
"line_mean": 34.4642857143,
"line_max": 83,
"alpha_frac": 0.5835850957,
"autogenerated": false,
"ratio": 3.4903339191564147,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0020699483497102545,
"num_lines": 56
} |
__author__ = 'chris'
"""Swagger to WADL utility"""
import sys, urllib3, json, re
import xml.etree.ElementTree as tree
from argparse import ArgumentParser
def camel_case(value):
    """Convert a snake_case identifier to camelCase (first word kept as-is)."""
    words = value.split("_")
    camel = words[0]
    for word in words[1:]:
        camel += word.title()
    return camel
def add_request(parent, parameters):
    """Append a WADL <request> element (params + optional JSON body representation).

    Args:
        parent: the <method> Element to attach the request to.
        parameters: Swagger parameter dicts for the operation.
    """
    try:
        # Last body parameter's dataType names the JSON schema of the request body.
        data_type = list(request['dataType'] for request in parameters if request['paramType'] == 'body').pop()
    except (IndexError, KeyError):
        # Narrowed from a bare except: no body parameter (IndexError on the
        # pop) or one lacking a dataType (KeyError) means no representation.
        data_type = None
    request = tree.SubElement(parent, 'request')
    add_parameters(request, parameters)
    if data_type is not None:
        tree.SubElement(request, 'representation', {"mediaType": "application/json", "json:describedBy": data_type})
def add_responses(parent, responses):
    """Append one WADL <response> element per Swagger response message.

    Each message contributes a status attribute, plus optional <doc> text and
    a JSON <representation> when a responseModel is declared.
    """
    for msg in responses:
        node = tree.SubElement(parent, 'response', {'status': str(msg['code'])})
        if 'message' in msg:
            tree.SubElement(node, 'doc').text = msg['message']
        if 'responseModel' in msg:
            tree.SubElement(node, 'representation',
                            {'mediaType': 'application/json',
                             'json:describedBy': msg['responseModel']})
def add_parameters(parent, parameters):
    """Append a WADL <param> for each non-body, non-path Swagger parameter.

    body/path parameters are represented elsewhere (request representation /
    template resource), but their name and required flag are still computed
    for every entry, matching the original evaluation order.
    """
    for param in parameters:
        name = camel_case(param['name'])
        required = str(param['required']).lower()
        if param['paramType'] in ('body', 'path'):
            continue
        tree.SubElement(parent, 'param',
                        {'name': name,
                         'style': param['paramType'],
                         'required': required})
def add_operations(parent, operations):
    """Emit one WADL <method> (with request and responses) per Swagger operation."""
    for op in operations:
        method_el = tree.SubElement(parent, 'method', {'name': op['method'].upper()})
        if 'notes' in op:
            tree.SubElement(method_el, 'doc').text = op['notes']
        add_request(method_el, op['parameters'])
        add_responses(method_el, op['responseMessages'])
def create_wadl(spec, endpoint):
    """Translate a Swagger 1.x spec dict into a WADL document and dump it to stdout.

    Args:
        spec: parsed Swagger resource JSON (expects 'apis', 'resourcePath', ...).
        endpoint: real protocol/host/port that prefixes the resource base.
    """
    wadl = tree.Element('application')
    doc = tree.SubElement(wadl, 'doc')
    paths = list(api['path'] for api in spec['apis'])
    if 'description' in spec:
        doc.text = spec['description']
    resources = tree.SubElement(wadl, 'resources', {'base': endpoint + spec['resourcePath']})
    """Loop through the APIs and add as resources.
    Any template-style parameters needs to be added as param and resource"""
    for api in spec['apis']:
        """Check whether this includes a template-style parameter. If it does, process as resource and param"""
        param = re.search(r'/\{(.+?)\}', api['path'])
        if param is not None:
            # e.g. /pet/{petId}: parent resource "pet" + nested template resource
            raw_param = param.group().replace("/", "")
            parent_path = re.sub('^/|/$', '', api['path'].replace(raw_param, ''))
            resource = tree.SubElement(resources, 'resource', {'path': parent_path})
            """if '/' + parent_path in paths:
            add_parameters(resource,
            list(api for api in spec['apis'] if api['path'] == '/' + parent_path).pop()['operations'])"""
            param = camel_case(param.group().replace("/", ""))
            template = tree.SubElement(resource, 'resource', {'path': param})
            tree.SubElement(template, 'param',
                            {'name': re.sub('(\{|\})', '', param), 'style': 'template', 'required': 'true'})
            add_operations(template, api['operations'])
        else:
            # Plain path: reuse an already-created resource with the same path
            # if one exists, otherwise create it.
            path = re.sub('^/', '', api['path'])
            resource = wadl.find('.//resource[@path="' + path + '"]')
            if resource is None:
                resource = tree.SubElement(resources, 'resource', {'path': re.sub('^/', '', api['path'])})
            add_operations(resource, api['operations'])
    # Namespaces are set at the end so they appear on the root element.
    wadl.set('xmlns', 'http://wadl.dev.java.net/2009/02')
    wadl.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
    wadl.set('xsi:schemaLocation', 'http://wadl.dev.java.net/2009/02 http://www.w3.org/Submission/wadl/wadl.xsd')
    wadl.set("xmlns:json", "http://wadl.dev.java.net/2009/02/json-schema")
    tree.dump(wadl)
def main():
    """CLI entry point: fetch the Swagger JSON spec and print its WADL translation."""
    argp = ArgumentParser("Create WADL from Swagger specification")
    argp.add_argument("url", help="Swagger URL (JSON spec)")
    argp.add_argument("endpoint", help="Real API protocol/host/port")
    args = argp.parse_args()
    # Fetch and decode the spec over HTTP, then convert.
    manager = urllib3.PoolManager()
    spec = json.loads(manager.urlopen('GET', args.url).data.decode("utf-8"))
    create_wadl(spec, args.endpoint)
if __name__ == "__main__":
main()
| {
"repo_name": "SensibleWood/swagger2wadl",
"path": "swagger2wadl.py",
"copies": "1",
"size": "4672",
"license": "mit",
"hash": -7446931661059921000,
"line_mean": 38.9316239316,
"line_max": 124,
"alpha_frac": 0.5948202055,
"autogenerated": false,
"ratio": 3.8803986710963456,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49752188765963457,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chris'
"""
This protocol class handles all direct (non-kademlia) messages between nodes.
All of the messages between a buyer and a vendor's store can be found here.
"""
import json
from zope.interface import implements
from rpcudp import RPCProtocol
from interfaces import MessageProcessor
from log import Logger
from openbazaard import get_data_folder
from protos.message import GET_CONTRACT
class MarketProtocol(RPCProtocol):
    """RPC handler for direct (non-kademlia) buyer<->vendor messages.

    Currently handles only GET_CONTRACT: serving stored contract files by hash.
    """
    implements(MessageProcessor)

    def __init__(self, node_proto, router):
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.handled_commands = [GET_CONTRACT]

    def connect_multiplexer(self, multiplexer):
        # Attached after construction; carries messages on/off the wire.
        self.multiplexer = multiplexer

    def rpc_get_contract(self, sender, contract_hash):
        """Return the stored contract (stringified JSON) for contract_hash,
        or the string "None" when it cannot be served."""
        contract_id = long(contract_hash.encode('hex'), 16)
        # BUG FIX: the original format string had no %s placeholder, so this
        # logging call raised TypeError at runtime.
        self.log.info("Looking up contract ID %s" % contract_id)
        self.router.addContact(sender)
        try:
            # `with` ensures the file handle is closed (the original leaked it).
            path = get_data_folder() + "/Store/Listings/Contracts/" + str(contract_id) + '.json'
            with open(path) as contract_file:
                data = json.load(contract_file)
            return str(data)
        except (IOError, ValueError):
            # Narrowed from a bare except: missing file or unparseable JSON.
            # Reply "None" rather than crashing the RPC loop.
            return "None"

    def call_get_contract(self, nodeToAsk, contract_hash):
        """Issue GET_CONTRACT to a remote node; returns a deferred RPC result."""
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table. If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        # The multiplexer iterates a processor to learn which commands it handles.
        return iter(self.handled_commands)
| {
"repo_name": "jorik041/Network",
"path": "market/protocol.py",
"copies": "1",
"size": "2057",
"license": "mit",
"hash": 2905390888201828400,
"line_mean": 33.8644067797,
"line_max": 144,
"alpha_frac": 0.652892562,
"autogenerated": false,
"ratio": 3.9106463878326996,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.50635389498327,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chrispaulson'
import math as m
import os
import pickle
import numpy as np
import pyKriging
class samplingplan():
    """Design-of-experiments sampling plans on the [0,1]^k unit hypercube:
    random and Morris-Mitchell-optimized Latin hypercubes plus full-factorial
    grids, with the space-filling metrics used by the optimizer."""

    def __init__(self, k=2):
        self.samplingplan = []
        self.k = k  # number of design variables (dimensions)
        # Optimized plans are cached as pickles inside the pyKriging package dir.
        self.path = os.path.dirname(pyKriging.__file__)
        self.path = self.path + '/sampling_plans/'

    def rlh(self, n, Edges=0):
        """
        Generates a random latin hypercube within the [0,1]^k hypercube.

        Inputs:
            n - desired number of points
            Edges - if Edges=1 the extreme bins will have their centers on
                    the edges of the domain

        Output:
            Latin hypercube sampling plan of n points in k dimensions
        """
        X = np.zeros((n, self.k))
        # Each column is an independent random permutation of the bin labels 1..n.
        for i in range(0, self.k):
            X[:, i] = np.transpose(np.random.permutation(np.arange(1, n + 1, 1)))
        if Edges == 1:
            X = (X - 1) / (n - 1)
        else:
            # place each point at the center of its bin
            X = (X - 0.5) / n
        return X

    def optimallhc(self, n, population=30, iterations=30, generation=False):
        """
        Generates an optimized Latin hypercube by optimizing the Morris-Mitchell
        criterion for a range of exponents.

        Inputs:
            n - number of points required
            population - number of individuals in the evolutionary optimizer
            iterations - number of generations the optimizer is run for
                Note: high values for the two inputs above will ensure high
                quality hypercubes, but the search will take longer.
            generation - if True always generate; if False reuse a cached
                plan from disk when one exists.

        Output:
            X - optimized Latin hypercube
        """
        cache_file = '{0}lhc_{1}_{2}.pkl'.format(self.path, self.k, n)
        if not generation:
            # Check for an existing LHC sampling plan on disk.
            if os.path.isfile(cache_file):
                # binary mode: pickles are byte streams (text mode was fragile)
                return pickle.load(open(cache_file, 'rb'))
            else:
                print(self.path)
                print('SP not found on disk, generating it now.')
        # list of qs to optimise Phi_q for
        q = [1, 2, 5, 10, 20, 50, 100]
        # Rectangular distance norm for a faster search (switch to p=2 for Euclidean).
        p = 1
        # start from a random Latin hypercube
        XStart = self.rlh(n)
        X3D = np.zeros((n, self.k, len(q)))
        # optimize Phi_q for each exponent q
        for i in range(len(q)):
            print('Now_optimizing_for_q = %d \n' % q[i])
            X3D[:, :, i] = self.mmlhs(XStart, population, iterations, q[i])
        # rank candidates by the Morris-Mitchell criterion and keep the best
        Index = self.mmsort(X3D, p)
        print('Best_lh_found_using_q = %d \n' % q[Index[1]])
        X = X3D[:, :, Index[1]]
        pickle.dump(X, open(cache_file, 'wb'))
        return X

    def fullfactorial(self, ppd=5):
        """Full-factorial grid on [0,1]^k with `ppd` points per dimension;
        returns an array of shape (ppd**k, k)."""
        ix = (slice(0, 1, ppd * 1j),) * self.k
        a = np.mgrid[ix].reshape(self.k, ppd ** self.k).T
        return a

    def mmsort(self, X3D, p=1):
        """
        Ranks sampling plans according to the Morris-Mitchell criterion.

        Inputs:
            X3D - three-dimensional array containing the sampling plans to rank
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)

        Output:
            Index - index array containing the ranking
        """
        Index = np.arange(np.size(X3D, axis=2))
        # Bubble-sort by pairwise mm() comparison.
        # NOTE(review): the inner scan starts at i=1, so Index[0] is never
        # compared — preserved from the original; verify against the
        # reference implementation.
        swap_flag = 1
        while swap_flag == 1:
            swap_flag = 0
            i = 1
            while i <= len(Index) - 2:
                if self.mm(X3D[:, :, Index[i]], X3D[:, :, Index[i + 1]], p) == 2:
                    Index[i], Index[i + 1] = Index[i + 1], Index[i]
                    swap_flag = 1
                i = i + 1
        return Index

    def perturb(self, X, PertNum):
        """
        Interchanges pairs of randomly chosen elements within randomly chosen
        columns of a sampling plan, PertNum times. A Latin hypercube stays a
        Latin hypercube under this operation.

        Inputs:
            X - sampling plan
            PertNum - number of perturbations to apply

        Output:
            perturbed copy of the sampling plan
        """
        X_pert = X.copy()
        [n, k] = np.shape(X_pert)
        for pert_count in range(0, PertNum):
            # int() casts are required: modern NumPy rejects float indices.
            col = int(m.floor(np.random.rand() * k))
            # choose two distinct random rows
            el1 = 0
            el2 = 0
            while el1 == el2:
                el1 = int(m.floor(np.random.rand() * n))
                el2 = int(m.floor(np.random.rand() * n))
            # swap the two chosen elements
            X_pert[el1, col], X_pert[el2, col] = X_pert[el2, col], X_pert[el1, col]
        return X_pert

    def mmlhs(self, X_start, population, iterations, q):
        """Evolutionary search for the most space-filling Latin hypercube.
        There is no need to call this directly - use optimallhc."""
        X_s = X_start.copy()
        n = np.size(X_s, 0)
        X_best = X_s
        # BUG FIX: the original scored the starting plan with the default
        # exponent (q=2) while offspring were scored with the requested q,
        # making the first comparisons inconsistent.
        Phi_best = self.mmphi(X_best, q)
        # mutation count decays from ~n/2 to 1 over the first 85% of generations
        leveloff = m.floor(0.85 * iterations)
        for it in range(0, iterations):
            if it < leveloff:
                mutations = int(round(1 + (0.5 * n - 1) * (leveloff - it) / (leveloff - 1)))
            else:
                mutations = 1
            X_improved = X_best
            Phi_improved = Phi_best
            for offspring in range(0, population):
                X_try = self.perturb(X_best, mutations)
                Phi_try = self.mmphi(X_try, q)
                if Phi_try < Phi_improved:
                    X_improved = X_try
                    Phi_improved = Phi_try
            if Phi_improved < Phi_best:
                X_best = X_improved
                Phi_best = Phi_improved
        return X_best

    def mmphi(self, X, q=2, p=1):
        """
        Morris-Mitchell sampling plan quality criterion.

        Inputs:
            X - sampling plan
            q - exponent used in the calculation of the metric (default = 2)
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)

        Output:
            Phiq - sampling plan 'space-fillingness' metric (lower is better)
        """
        # distances between all point pairs and their multiplicities
        J, d = self.jd(X, p)
        Phiq = (np.sum(J * (d ** (-q)))) ** (1.0 / q)
        return Phiq

    def jd(self, X, p=1):
        """
        Distances between all pairs of points in X under the p-norm, sorted
        ascending with multiple occurrences collapsed.

        Inputs:
            X - sampling plan being evaluated
            p - distance norm (p=1 rectangular - default, p=2 Euclidean)

        Outputs:
            J - multiplicity array (number of pairs separated by each distance)
            distinct_d - list of distinct distance values
        """
        n = np.size(X[:, 1])
        # n*(n-1)//2 unordered pairs (integer division: '/' breaks on Python 3)
        d = np.zeros((n * (n - 1) // 2))
        # renamed from `list`, which shadowed the builtin
        pairs = [(i, j) for i in range(n - 1) for j in range(i + 1, n)]
        for idx, (a, b) in enumerate(pairs):
            d[idx] = np.linalg.norm((X[a, :] - X[b, :]), p)
        # remove multiple occurrences
        distinct_d = np.unique(d)
        J = np.zeros(np.size(distinct_d))
        for i in range(len(distinct_d)):
            # J[i] = number of pairs separated by distance distinct_d[i]
            J[i] = np.sum(self.ismember(d, distinct_d[i]))
        return J, distinct_d

    def ismember(self, A, B):
        """For each a in A, count how many elements of B equal it."""
        return [np.sum(a == B) for a in A]

    def mm(self, X1, X2, p=1):
        """
        Given two sampling plans chooses the one with the better space-filling
        properties (as per the Morris-Mitchell criterion).

        Inputs:
            X1, X2 - the two sampling plans
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)

        Output:
            0 - identical plans or equally space-filling;
            1 - X1 is more space-filling; 2 - X2 is more space-filling
        """
        # column-sorted equality means the plans contain the same points
        if (np.sort(X1) == np.sort(X2)).all():
            return 0
        J1, d1 = self.jd(X1, p)
        m1 = len(d1)
        J2, d2 = self.jd(X2, p)
        m2 = len(d2)
        # Blend distances and (negated) multiplicities per definition 1.2B:
        # we maximise the d's and minimise the J's.
        V1 = np.zeros((2 * m1))
        V1[0:len(V1):2] = d1
        V1[1:len(V1):2] = -J1
        V2 = np.zeros((2 * m2))
        V2[0:len(V2):2] = d2
        V2[1:len(V2):2] = -J2
        # trim the longer vector to the shorter one's length
        # (renamed from `m`, which shadowed the math-module alias)
        m_min = min(m1, m2)
        V1 = V1[0:m_min]
        V2 = V2[0:m_min]
        # c[i]=1 where V1 wins, 2 where V2 wins, 0 on ties
        c = np.zeros(m_min)
        for i in range(m_min):
            if V1[i] > V2[i]:
                c[i] = 1
            elif V1[i] < V2[i]:
                c[i] = 2
        if sum(c) == 0:
            # not identical but equally space-filling
            return 0
        # winner is decided by the first non-zero comparison
        i = 0
        while c[i] == 0:
            i = i + 1
        return c[i]
# Manual smoke test: build each flavor of sampling plan in two dimensions.
if __name__ == '__main__':
    sp = samplingplan(k=2)
    print sp.fullfactorial()
    print sp.rlh(15)
    print sp.optimallhc(16)
| {
"repo_name": "DailyActie/Surrogate-Model",
"path": "01-codes/pyKriging-master/pyKriging/samplingplan.py",
"copies": "1",
"size": "11886",
"license": "mit",
"hash": -6818585781957699000,
"line_mean": 32.3876404494,
"line_max": 144,
"alpha_frac": 0.5237253912,
"autogenerated": false,
"ratio": 3.6193666260657733,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4643092017265773,
"avg_score": null,
"num_lines": null
} |
__author__ = 'chrispaulson'
import numpy as np
import math as m
import os
import pickle
import pyKriging
class samplingplan():
    """
    Generation and comparison of sampling plans in the [0,1]^k unit
    hypercube: full factorial grids, random Latin hypercubes and Latin
    hypercubes optimized with the Morris-Mitchell space-filling criterion.
    """

    def __init__(self, k=2):
        """
        Args:
            k: number of design variables (dimensions) of the plans.
        """
        self.samplingplan = []
        self.k = k
        # Directory intended for caching generated hypercubes on disk
        # (caching itself is currently disabled, see optimallhc).
        self.path = os.path.dirname(pyKriging.__file__)
        self.path = self.path + '/sampling_plans/'

    def rlh(self, n, Edges=0):
        """
        Generates a random Latin hypercube within the [0,1]^k hypercube.

        Inputs:
            n - desired number of points
            Edges - if Edges=1 the extreme bins will have their centers on
                the edges of the domain; otherwise bin centers are used
        Output:
            Latin hypercube sampling plan of n points in k dimensions
        """
        # Pre-allocate memory
        X = np.zeros((n, self.k))
        # Each column is an independent random permutation of 1..n
        # (0 is excluded so bin centers map cleanly below).
        for i in range(0, self.k):
            X[:, i] = np.transpose(np.random.permutation(np.arange(1, n + 1, 1)))
        if Edges == 1:
            X = (X - 1) / (n - 1)
        else:
            X = (X - 0.5) / n
        return X

    def optimallhc(self, n, population=30, iterations=30, generation=False):
        """
        Generates an optimized Latin hypercube by optimizing the
        Morris-Mitchell criterion for a range of exponents.

        Inputs:
            n - number of points required
            population - number of individuals in the evolutionary optimizer
            iterations - number of generations the optimizer is run for.
                Note: high values for the two inputs above ensure high
                quality hypercubes, but the search will take longer.
            generation - if True, always generate a fresh LHC (disk caching
                of plans is currently disabled, so this flag has no effect)
        Output:
            X - optimized Latin hypercube
        """
        # TODO: disk caching of generated plans (pickle files under
        # self.path) was removed because it was broken on Python 3.
        # List of exponents q to optimize Phi_q for.
        q = [1, 2, 5, 10, 20, 50, 100]
        # Rectangular distance norm for a faster search; use p=2 if the
        # Euclidean norm is required.
        p = 1
        # Start from a random Latin hypercube.
        XStart = self.rlh(n)
        X3D = np.zeros((n, self.k, len(q)))
        # For each q, optimize Phi_q.
        for i in range(len(q)):
            print('Now_optimizing_for_q = %d \n' % q[i])
            X3D[:, :, i] = self.mmlhs(XStart, population, iterations, q[i])
        # Rank the candidates with the Morris-Mitchell criterion; the best
        # plan is the FIRST entry of the ranking. (The original port read
        # Index[1], a leftover from 1-based MATLAB indexing.)
        Index = self.mmsort(X3D, p)
        print('Best_lh_found_using_q = %d \n' % q[Index[0]])
        X = X3D[:, :, Index[0]]
        return X

    def fullfactorial(self, ppd=5):
        """
        Generates a full factorial sampling plan in [0,1]^k.

        Inputs:
            ppd - points per dimension (default 5)
        Output:
            (ppd**k, k) array of grid points spanning [0,1] in each axis
        """
        # A complex step in np.mgrid means "number of points", endpoint
        # inclusive - equivalent to linspace(0, 1, ppd) per dimension.
        ix = (slice(0, 1, ppd * 1j),) * self.k
        a = np.mgrid[ix].reshape(self.k, ppd ** self.k).T
        return a

    def mmsort(self, X3D, p=1):
        """
        Ranks sampling plans according to the Morris-Mitchell criterion
        definition. Note: similar to phisort, which uses the numerical
        quality criterion Phiq as a basis for the ranking.

        Inputs:
            X3D - three-dimensional array containing the sampling plans to
                be ranked (plans stacked along axis 2)
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)
        Output:
            Index - index array containing the ranking, best plan first
        """
        # Pre-allocate memory
        Index = np.arange(np.size(X3D, axis=2))
        # Bubble-sort. NOTE: the original port started the inner loop at
        # i = 1 (a 1-based MATLAB leftover), which left Index[0] out of
        # every comparison; the loop must start at 0.
        swap_flag = 1
        while swap_flag == 1:
            swap_flag = 0
            i = 0
            while i <= len(Index) - 2:
                if self.mm(X3D[:, :, Index[i]], X3D[:, :, Index[i + 1]], p) == 2:
                    Index[i], Index[i + 1] = Index[i + 1], Index[i]
                    swap_flag = 1
                i = i + 1
        return Index

    def perturb(self, X, PertNum):
        """
        Interchanges pairs of randomly chosen elements within randomly
        chosen columns of a sampling plan a number of times. If the plan is
        a Latin hypercube, the result of this operation will also be a
        Latin hypercube.

        Inputs:
            X - sampling plan
            PertNum - the number of changes (perturbations) to be made to X
        Output:
            X_pert - perturbed copy of the sampling plan
        """
        X_pert = X.copy()
        n, k = np.shape(X_pert)
        for pert_count in range(0, PertNum):
            col = int(m.floor(np.random.rand(1) * k))
            # Choose two distinct random rows (assumes n >= 2, otherwise
            # this loop cannot terminate - same as the original).
            el1 = 0
            el2 = 0
            while el1 == el2:
                el1 = int(m.floor(np.random.rand(1) * n))
                el2 = int(m.floor(np.random.rand(1) * n))
            # Swap the two chosen elements.
            X_pert[el1, col], X_pert[el2, col] = X_pert[el2, col], X_pert[el1, col]
        return X_pert

    def mmlhs(self, X_start, population, iterations, q):
        """
        Evolutionary operation search for the most space-filling Latin
        hypercube of a certain size and dimensionality. There is no need to
        call this directly - use optimallhc.
        """
        X_s = X_start.copy()
        n = np.size(X_s, 0)
        X_best = X_s
        # Score the starting plan with the SAME exponent q used for the
        # offspring (the original used the default q=2 here, making the
        # very first comparison inconsistent).
        Phi_best = self.mmphi(X_best, q)
        # Mutation count decays linearly over the first 85% of the run,
        # then settles at a single perturbation per offspring.
        leveloff = m.floor(0.85 * iterations)
        for it in range(0, iterations):
            if it < leveloff:
                mutations = int(round(1 + (0.5 * n - 1) * (leveloff - it) / (leveloff - 1)))
            else:
                mutations = 1
            X_improved = X_best
            Phi_improved = Phi_best
            for offspring in range(0, population):
                X_try = self.perturb(X_best, mutations)
                Phi_try = self.mmphi(X_try, q)
                if Phi_try < Phi_improved:
                    X_improved = X_try
                    Phi_improved = Phi_try
            if Phi_improved < Phi_best:
                X_best = X_improved
                Phi_best = Phi_improved
        return X_best

    def mmphi(self, X, q=2, p=1):
        """
        Calculates the sampling plan quality criterion of Morris and
        Mitchell.

        Inputs:
            X - sampling plan
            q - exponent used in the calculation of the metric (default 2)
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)
        Output:
            Phiq - sampling plan 'space-fillingness' metric (lower = better)
        """
        # Distances between all pairs of points (p-norm) and the
        # multiplicity array J.
        J, d = self.jd(X, p)
        # The sampling plan quality criterion.
        Phiq = (np.sum(J * (d ** (-q)))) ** (1.0 / q)
        return Phiq

    def jd(self, X, p=1):
        """
        Computes the distances between all pairs of points in a sampling
        plan X using the p-norm, sorts them in ascending order and removes
        multiple occurrences.

        Inputs:
            X - sampling plan being evaluated
            p - distance norm (p=1 rectangular - default, p=2 Euclidean)
        Output:
            J - multiplicity array (the number of pairs separated by each
                distance value)
            distinct_d - array of distinct distance values, ascending
        """
        # Number of points in the sampling plan. (The original read
        # np.size(X[:, 1]), which fails for single-column plans.)
        n = np.size(X, 0)
        # Distances between all n*(n-1)/2 unordered pairs of points.
        d = np.zeros((n * (n - 1) // 2))
        # `pairs` renamed from `list`, which shadowed the builtin.
        pairs = [(i, j) for i in range(n - 1) for j in range(i + 1, n)]
        for idx, (i, j) in enumerate(pairs):
            d[idx] = np.linalg.norm(X[i, :] - X[j, :], p)
        # Collapse duplicate distances; np.unique also sorts ascending.
        distinct_d, J = np.unique(d, return_counts=True)
        return J, distinct_d

    def mm(self, X1, X2, p=1):
        """
        Given two sampling plans chooses the one with the better
        space-filling properties (as per the Morris-Mitchell criterion).

        Inputs:
            X1, X2 - the two sampling plans
            p - distance metric (p=1 rectangular - default, p=2 Euclidean)
        Output:
            Mmplan - 0 if the plans are identical or equally space-filling,
                1 if X1 is more space-filling, 2 if X2 is
        """
        # Compare the plans in their (row-wise) sorted form; identical
        # designs need no further work.
        v = np.sort(X1) == np.sort(X2)
        if v.all():
            return 0
        # Distance and multiplicity arrays of each plan.
        J1, d1 = self.jd(X1, p)
        m1 = len(d1)
        J2, d2 = self.jd(X2, p)
        m2 = len(d2)
        # Blend the distance and multiplicity arrays together for
        # comparison according to definition 1.2B. Note the different
        # signs - we are maximising the d's and minimising the J's.
        V1 = np.zeros((2 * m1))
        V1[0::2] = d1
        V1[1::2] = -J1
        V2 = np.zeros((2 * m2))
        V2[0::2] = d2
        V2[1::2] = -J2
        # The longer vector can be trimmed down to the length of the
        # shorter one.
        mlen = min(m1, m2)
        V1 = V1[0:mlen]
        V2 = V2[0:mlen]
        # c[i] = 1 where V1 wins, 2 where V2 wins, 0 on ties.
        c = np.zeros(mlen)
        c[V1 > V2] = 1
        c[V1 < V2] = 2
        if sum(c) == 0:
            # Not identical, but equally space-filling.
            return 0
        # The more space-filling design (Mmplan) is the first non-zero
        # element of c.
        i = 0
        while c[i] == 0:
            i = i + 1
        return c[i]
if __name__ == '__main__':
    # Quick demonstration: build a 2-D plan set and print the output of
    # each generator in turn.
    demo = samplingplan(k=2)
    print(demo.fullfactorial())
    print(demo.rlh(15))
    print(demo.optimallhc(16))
| {
"repo_name": "capaulson/pyKriging",
"path": "pyKriging/samplingplan.py",
"copies": "1",
"size": "11545",
"license": "mit",
"hash": 6596410530401962000,
"line_mean": 31.9857142857,
"line_max": 148,
"alpha_frac": 0.5255088783,
"autogenerated": false,
"ratio": 3.6191222570532915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9595258959734186,
"avg_score": 0.009874435123820993,
"num_lines": 350
} |
__author__ = 'Chris'
import wx
from gooey.gui import styling
class CalendarDlg(wx.Dialog):
    """Modal dialog showing a drop-down date picker with an Ok button."""

    def __init__(self, parent):
        wx.Dialog.__init__(self, parent)
        self.SetBackgroundColour('#ffffff')

        self.ok_button = wx.Button(self, label='Ok')
        self.datepicker = wx.DatePickerCtrl(self, style=wx.DP_DROPDOWN)

        # Heading, picker and button stacked vertically with 15px margins.
        root_sizer = wx.BoxSizer(wx.VERTICAL)
        root_sizer.AddSpacer(10)
        root_sizer.Add(styling.H1(self, label='Select a Date'), 0, wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(10)
        root_sizer.Add(self.datepicker, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(10)

        # Right-aligned Ok button row.
        button_row = wx.BoxSizer(wx.HORIZONTAL)
        button_row.AddStretchSpacer(1)
        button_row.Add(self.ok_button, 0)
        root_sizer.Add(button_row, 0, wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(20)

        self.SetSizerAndFit(root_sizer)
        self.Bind(wx.EVT_BUTTON, self.OnOkButton, self.ok_button)

    def OnOkButton(self, event):
        # Dismiss the dialog; the picked value stays readable via GetPath().
        self.Close()
        return wx.ID_OK

    def OnCancellButton(self, event):
        # NOTE(review): `return None` can never raise, so the except branch
        # (and its self.Close()) is unreachable; kept as in the original.
        try:
            return None
        except:
            self.Close()

    def GetPath(self):
        """Return the text before the first space of the picked value's
        string form - presumably the date portion; the exact format depends
        on wx/locale (TODO confirm)."""
        return str(self.datepicker.GetValue()).split(' ')[0]
| {
"repo_name": "jonathanlurie/timelapseComposer",
"path": "lib/python/gooey/gui/widgets/calender_dialog.py",
"copies": "2",
"size": "1284",
"license": "mit",
"hash": -4004475343679557000,
"line_mean": 27.1818181818,
"line_max": 94,
"alpha_frac": 0.6651090343,
"autogenerated": false,
"ratio": 3.267175572519084,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4932284606819084,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Chris'
import wx
from gooey.gui import styling
from gooey.gui.widgets.calender_dialog import CalendarDlg
class AbstractChooser(object):
    """
    Base class pairing a labelled text box with a chooser button.

    Subclasses override on_button to open a specific dialog (file,
    directory, calendar, ...) and write the chosen value into the text box.
    """

    def __init__(self, data):
        # data: dict read for 'title' and 'help_msg' entries below.
        self.data = data

        # parent
        self.panel = None

        self.button_text = 'Browse'

        # Widgets (populated later by do_layout)
        self.title = None
        self.help_msg = None
        self.text_box = None
        self.button = None
        self.panel = None

    def build(self, parent):
        return self.do_layout(parent)

    def do_layout(self, parent):
        """Create the widgets, arrange them in sizers and wire up events."""
        self.panel = wx.Panel(parent)

        self.title = self.CreateNameLabelWidget(self.panel)
        self.help_msg = self.CreateHelpMsgWidget(self.panel)
        self.text_box = wx.TextCtrl(self.panel)
        self.button = wx.Button(self.panel, label=self.button_text, size=(73, 23))

        column = wx.BoxSizer(wx.VERTICAL)
        row = wx.BoxSizer(wx.HORIZONTAL)

        column.Add(self.title)
        column.AddSpacer(2)

        # Only reserve space for the help text when there is any.
        if self.help_msg.GetLabelText():
            column.Add(self.help_msg, 1, wx.EXPAND)
            column.AddSpacer(2)
        else:
            column.AddStretchSpacer(1)

        row.Add(self.text_box, 1, wx.EXPAND)
        row.AddSpacer(10)
        row.Add(self.button, 0)

        column.Add(row, 0, wx.EXPAND)
        self.panel.SetSizer(column)

        self.panel.Bind(wx.EVT_SIZE, self.OnResize)
        self.panel.Bind(wx.EVT_BUTTON, self.on_button, self.button)
        return self.panel

    def CreateHelpMsgWidget(self, parent):
        """Build the dark-grey help label from data['help_msg']."""
        help_label = wx.StaticText(parent, label=self.data['help_msg'])
        styling.MakeDarkGrey(help_label)
        return help_label

    def CreateNameLabelWidget(self, parent):
        """Build the bold title label from data['title'] (title-cased)."""
        title_label = wx.StaticText(parent, label=self.data['title'].title())
        styling.MakeBold(title_label)
        return title_label

    def OnResize(self, evt):
        # Re-wrap the help text whenever the panel width changes.
        if self.help_msg is None:
            return
        container_width, _ = self.panel.Size
        text_width, _ = self.help_msg.Size
        if text_width != container_width:
            self.help_msg.SetLabel(self.help_msg.GetLabelText().replace('\n', ' '))
            self.help_msg.Wrap(container_width)
        evt.Skip()

    def on_button(self, evt):
        # Subclasses decide which dialog the button opens.
        raise NotImplementedError
class FileChooser(AbstractChooser):
    """Chooser whose Browse button opens a file-open dialog."""

    def __init__(self, data):
        AbstractChooser.__init__(self, data)

    def on_button(self, evt):
        dialog = wx.FileDialog(self.panel, style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST)
        if dialog.ShowModal() == wx.ID_OK:
            chosen = dialog.GetPath()
            if chosen:
                self.text_box.SetLabelText(chosen)
class DirectoryChooser(AbstractChooser):
    """Chooser whose Browse button opens a directory-selection dialog."""

    def __init__(self, data):
        AbstractChooser.__init__(self, data)

    def on_button(self, evt):
        dialog = wx.DirDialog(self.panel, 'Select directory', style=wx.DD_DEFAULT_STYLE)
        if dialog.ShowModal() == wx.ID_OK:
            chosen = dialog.GetPath()
            if chosen:
                self.text_box.SetLabelText(chosen)
class CalendarChooser(AbstractChooser):
    """Chooser whose button opens the CalendarDlg date picker."""

    def __init__(self, data):
        AbstractChooser.__init__(self, data)
        self.button_text = 'Choose Date'

    def on_button(self, evt):
        dialog = CalendarDlg(self.panel)
        dialog.ShowModal()
        chosen = dialog.GetPath()
        if chosen:
            self.text_box.SetLabelText(chosen)
| {
"repo_name": "jonathanlurie/timelapseComposer",
"path": "lib/python/gooey/gui/widgets/choosers.py",
"copies": "2",
"size": "3529",
"license": "mit",
"hash": -8044580885041872000,
"line_mean": 23.0283687943,
"line_max": 86,
"alpha_frac": 0.6370076509,
"autogenerated": false,
"ratio": 3.439571150097466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5076578800997466,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Chris'
import wx
from gooey.gui.util import wx_util
class CalendarDlg(wx.Dialog):
    """Modal dialog showing a drop-down date picker with an Ok button."""

    def __init__(self, parent):
        wx.Dialog.__init__(self, parent)
        self.SetBackgroundColour('#ffffff')

        self.ok_button = wx.Button(self, label='Ok')
        self.datepicker = wx.DatePickerCtrl(self, style=wx.DP_DROPDOWN)

        # Heading, picker and button stacked vertically with 15px margins.
        root_sizer = wx.BoxSizer(wx.VERTICAL)
        root_sizer.AddSpacer(10)
        root_sizer.Add(wx_util.h1(self, label='Select a Date'), 0, wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(10)
        root_sizer.Add(self.datepicker, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(10)

        # Right-aligned Ok button row.
        button_row = wx.BoxSizer(wx.HORIZONTAL)
        button_row.AddStretchSpacer(1)
        button_row.Add(self.ok_button, 0)
        root_sizer.Add(button_row, 0, wx.LEFT | wx.RIGHT, 15)
        root_sizer.AddSpacer(20)

        self.SetSizerAndFit(root_sizer)
        self.Bind(wx.EVT_BUTTON, self.OnOkButton, self.ok_button)

    def OnOkButton(self, event):
        # Dismiss the dialog; the picked value stays readable via GetPath().
        self.Close()
        return wx.ID_OK

    def OnCancellButton(self, event):
        # NOTE(review): `return None` can never raise, so the except branch
        # (and its self.Close()) is unreachable; kept as in the original.
        try:
            return None
        except:
            self.Close()

    def GetPath(self):
        """Return the text before the first space of the picked value's
        string form - presumably the date portion; the exact format depends
        on wx/locale (TODO confirm)."""
        return str(self.datepicker.GetValue()).split(' ')[0]
| {
"repo_name": "tmr232/Gooey",
"path": "gooey/gui/widgets/calender_dialog.py",
"copies": "5",
"size": "1291",
"license": "mit",
"hash": 5492876429048084000,
"line_mean": 26.6888888889,
"line_max": 94,
"alpha_frac": 0.6630518978,
"autogenerated": false,
"ratio": 3.251889168765743,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6414941066565742,
"avg_score": null,
"num_lines": null
} |
__author__ = 'Chris'
"""
Preps the extracted Python code so that it can be evaled by the
monkey_parser
"""
from itertools import *
# Sample argparse-using program used as input for the helpers below.
# NOTE: this is a runtime string constant that gets parsed line-by-line;
# its contents must not be edited casually.
source = '''
import sys
import os
import doctest
import cProfile
import pstats
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from gooey import Gooey
parser = ArgumentParser(description='Example Argparse Program', formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('filename', help='filename')
parser.add_argument('-r', '--recursive', dest='recurse', action='store_true', help='recurse into subfolders [default: %(default)s]')
parser.add_argument('-v', '--verbose', dest='verbose', action='count', help='set verbosity level [default: %(default)s]')
parser.add_argument('-i', '--include', action='append', help='only include paths matching this regex pattern. Note: exclude is given preference over include. [default: %(default)s]', metavar='RE')
parser.add_argument('-m', '--mycoolargument', help='mycoolargument')
parser.add_argument('-e', '--exclude', dest='exclude', help='exclude paths matching this regex pattern. [default: %(default)s]', metavar='RE')
parser.add_argument('-V', '--version', action='version')
parser.add_argument('-T', '--tester', choices=['yes', 'no'])
parser.add_argument(dest='paths', help='paths to folder(s) with source file(s) [default: %(default)s]', metavar='path', nargs='+')
'''
def take_imports(code):
    """Yield lines from the start of *code* while each contains 'import'."""
    def _is_import(line):
        return 'import' in line
    return takewhile(_is_import, code)
def drop_imports(code):
    """Skip the leading 'import' lines of *code* and yield the rest."""
    def _is_import(line):
        return 'import' in line
    return dropwhile(_is_import, code)
def split_line(line):
    """Split an assignment statement into (variable, expression) strings.

    in:  "parser = ArgumentParser(description='Example Argparse Program')"
    out: ("parser", "ArgumentParser(description='Example Argparse Program')")

    Only the first '=' is split on, so chained/keyword '=' signs survive.
    """
    lhs, rhs = line.split('=', 1)
    return lhs.strip(), rhs.strip()
def update_parser_varname(new_varname, code):
    """
    Rename the ArgumentParser variable throughout *code*.

    Finds the first non-import assignment (assumed to be the
    ``parser = ArgumentParser(...)`` line), takes its left-hand name and
    replaces every occurrence of that name with *new_varname*.

    Args:
        new_varname: replacement variable name.
        code: iterable of source-code lines.
    Returns:
        list of updated source lines (empty lines removed).
    """
    # Materialize into a list: the original used filter(), which on
    # Python 3 is a lazy iterator that dropwhile() would consume before
    # the final comprehension could iterate it again.
    lines = [line for line in code if line != '']
    argparse_code = dropwhile(lambda line: 'import' in line, lines)
    # The builtin next() works on both Python 2 and 3; the original used
    # the Python-2-only `.next()` iterator method.
    old_argparser_varname, _ = split_line(next(argparse_code))
    updated_code = [line.replace(old_argparser_varname, new_varname)
                    for line in lines]
    return updated_code
if __name__ == '__main__':
    # No demo or CLI behaviour; this module is import-only.
    pass
| {
"repo_name": "intfrr/Gooey",
"path": "gooey/python_bindings/code_prep.py",
"copies": "10",
"size": "2371",
"license": "mit",
"hash": -2130466036749207800,
"line_mean": 36.868852459,
"line_max": 196,
"alpha_frac": 0.6942218473,
"autogenerated": false,
"ratio": 3.7163009404388716,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9410522787738871,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.