id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
168,072 | import os
import boto3
from flask import Flask, jsonify, make_response, request
dynamodb_client = boto3.client('dynamodb')
USERS_TABLE = os.environ['USERS_TABLE']
def get_user(user_id):
    """Fetch a user record from DynamoDB and return it as a JSON response.

    Responds with a 404 JSON error payload when no item matches *user_id*.
    """
    lookup = dynamodb_client.get_item(
        TableName=USERS_TABLE, Key={'userId': {'S': user_id}}
    )
    record = lookup.get('Item')
    if not record:
        return jsonify({'error': 'Could not find user with provided "userId"'}), 404
    return jsonify(
        {'userId': record.get('userId').get('S'), 'name': record.get('name').get('S')}
    )
168,073 | import os
import boto3
from flask import Flask, jsonify, make_response, request
dynamodb_client = boto3.client('dynamodb')
USERS_TABLE = os.environ['USERS_TABLE']
def create_user():
    """Create a user from the JSON request body and store it in DynamoDB.

    Expects ``userId`` and ``name`` in the payload; responds with 400 when
    either is missing, otherwise echoes the stored user back.
    """
    payload = request.json
    user_id = payload.get('userId')
    name = payload.get('name')
    if not (user_id and name):
        return jsonify({'error': 'Please provide both "userId" and "name"'}), 400
    dynamodb_client.put_item(
        TableName=USERS_TABLE, Item={'userId': {'S': user_id}, 'name': {'S': name}}
    )
    return jsonify({'userId': user_id, 'name': name})
168,074 | import os
import boto3
from flask import Flask, jsonify, make_response, request
def resource_not_found(e):
    """Flask error handler: reply with a JSON 404 body for unknown routes."""
    body = jsonify(error='Not found!')
    return make_response(body, 404)
168,075 | import os
import boto3
dynamodb = boto3.resource('dynamodb')
def delete(event, context):
    """Lambda handler: remove a todo item identified by the path-parameter id."""
    todo_id = event['pathParameters']['id']
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])
    # Remove the record by primary key.
    table.delete_item(Key={'id': todo_id})
    # Signal success to API Gateway.
    return {"statusCode": 200}
168,076 | import json
import time
import logging
import os
from todos import decimalencoder
import boto3
dynamodb = boto3.resource('dynamodb')
def update(event, context):
    """Lambda handler: update a todo item's ``text`` and ``checked`` fields.

    Expects a JSON body containing ``text`` and ``checked``; the item id
    comes from the path parameters. Returns the fully updated item.

    Raises:
        Exception: when the request body fails validation.
    """
    data = json.loads(event['body'])
    if 'text' not in data or 'checked' not in data:
        logging.error("Validation Failed")
        # Fix: the original had an unreachable `return` after this raise.
        raise Exception("Couldn't update the todo item.")

    timestamp = int(time.time() * 1000)
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])

    # Update the todo in the database; '#todo_text' aliases the reserved
    # DynamoDB attribute name 'text'.
    result = table.update_item(
        Key={
            'id': event['pathParameters']['id']
        },
        ExpressionAttributeNames={
            '#todo_text': 'text',
        },
        ExpressionAttributeValues={
            ':text': data['text'],
            ':checked': data['checked'],
            ':updatedAt': timestamp,
        },
        UpdateExpression='SET #todo_text = :text, '
                         'checked = :checked, '
                         'updatedAt = :updatedAt',
        ReturnValues='ALL_NEW',
    )

    # DecimalEncoder converts DynamoDB Decimal values for JSON output.
    return {
        "statusCode": 200,
        "body": json.dumps(result['Attributes'],
                           cls=decimalencoder.DecimalEncoder)
    }
168,077 | import os
import json
from todos import decimalencoder
import boto3
dynamodb = boto3.resource('dynamodb')
def get(event, context):
    """Lambda handler: fetch a single todo item by its path-parameter id."""
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])
    # Look the item up by primary key.
    fetched = table.get_item(Key={'id': event['pathParameters']['id']})
    # DecimalEncoder turns DynamoDB Decimal values into JSON numbers.
    body = json.dumps(fetched['Item'], cls=decimalencoder.DecimalEncoder)
    return {"statusCode": 200, "body": body}
168,078 | import json
import logging
import os
import time
import uuid
import boto3
dynamodb = boto3.resource('dynamodb')
def create(event, context):
    """Lambda handler: create a new todo item from the JSON request body.

    Raises:
        Exception: when the payload lacks the required ``text`` field.
    """
    data = json.loads(event['body'])
    if 'text' not in data:
        logging.error("Validation Failed")
        raise Exception("Couldn't create the todo item.")

    now = str(time.time())
    item = {
        'id': str(uuid.uuid1()),
        'text': data['text'],
        'checked': False,
        'createdAt': now,
        'updatedAt': now,
    }

    # Persist the new todo.
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])
    table.put_item(Item=item)

    # Echo the stored item back to the caller.
    return {"statusCode": 200, "body": json.dumps(item)}
168,079 | import json
import os
from todos import decimalencoder
import boto3
dynamodb = boto3.resource('dynamodb')
def list(event, context):
    """Lambda handler: return every todo item in the table.

    NOTE: the name shadows the builtin ``list``; kept because the deployed
    handler is referenced by this name.
    """
    table = dynamodb.Table(os.environ['DYNAMODB_TABLE'])
    # Full table scan — acceptable for a small todo table.
    scanned = table.scan()
    body = json.dumps(scanned['Items'], cls=decimalencoder.DecimalEncoder)
    return {"statusCode": 200, "body": body}
168,080 | import json
import telegram
import os
import logging
logger = logging.getLogger()

# Drop any pre-installed handlers (AWS Lambda attaches one by default).
# Fix: iterate over a copy — removing entries from the live
# ``logger.handlers`` list while iterating it skips every other handler.
if logger.handlers:
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

# Canned API Gateway responses.
OK_RESPONSE = {
    'statusCode': 200,
    'headers': {'Content-Type': 'application/json'},
    'body': json.dumps('ok')
}
ERROR_RESPONSE = {
    'statusCode': 400,
    'body': json.dumps('Oops, something went wrong!')
}
def configure_telegram():
    """
    Configures the bot with a Telegram Token.
    Returns a bot instance.
    """
    token = os.environ.get('TELEGRAM_TOKEN')
    if token:
        return telegram.Bot(token)
    logger.error('The TELEGRAM_TOKEN must be set')
    raise NotImplementedError
def webhook(event, context):
    """
    Runs the Telegram webhook.
    """
    bot = configure_telegram()
    logger.info('Event: {}'.format(event))

    # Only handle POST requests that carry a body.
    method = event.get('requestContext', {}).get('http', {}).get('method')
    if method == 'POST' and event.get('body'):
        logger.info('Message received')
        update = telegram.Update.de_json(json.loads(event.get('body')), bot)
        chat_id = update.message.chat.id
        text = update.message.text
        if text == '/start':
            text = """Hello, human! I am an echo bot, built with Python and the Serverless Framework.
You can take a look at my source code here: https://github.com/jonatasbaldin/serverless-telegram-bot.
If you have any issues, please drop a tweet to my creator: https://twitter.com/jonatsbaldin. Happy botting!"""
        bot.sendMessage(chat_id=chat_id, text=text)
        logger.info('Message sent')
        return OK_RESPONSE
    return ERROR_RESPONSE
The provided code snippet includes necessary dependencies for implementing the `set_webhook` function. Write a Python function `def set_webhook(event, context)` to solve the following problem:
Sets the Telegram bot webhook.
Here is the function:
def set_webhook(event, context):
    """
    Sets the Telegram bot webhook.
    """
    logger.info('Event: {}'.format(event))
    bot = configure_telegram()
    # Build the callback URL from the incoming request's host and stage.
    host = event.get('headers').get('host')
    stage = event.get('requestContext').get('stage')
    url = 'https://{}/{}/'.format(host, stage)
    if bot.set_webhook(url):
        return OK_RESPONSE
    return ERROR_RESPONSE
168,081 | import json
import sys
from solidgpt.definitions import *
def load_from_json(filename="data.json"):
    """Read *filename* and return the parsed JSON payload."""
    with open(filename, "r") as json_file:
        return json.load(json_file)
168,082 | import json
import sys
from solidgpt.definitions import *
def create_directories_if_not_exist(filepath: str):
    """Ensure the parent directory of *filepath* exists, creating it if needed.

    Fix: a bare filename has no directory component — ``os.path.dirname``
    returns ``""`` and ``os.makedirs("")`` raises FileNotFoundError, so an
    empty dirname is now skipped.
    """
    dir_name = os.path.dirname(filepath)
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return
def save_to_md(filename, content: str, path = "") -> str:
    """Write *content* to a markdown file under ROOT_DIR/path and return the path.

    NOTE(review): directories are created for *filename* as given (relative
    to the current working directory), but the file itself is written under
    ``ROOT_DIR/path`` — the two can diverge when *path* is non-empty.
    Confirm callers pass consistent values.
    """
    create_directories_if_not_exist(filename)
    path = os.path.join(ROOT_DIR, path)  # ROOT_DIR comes from solidgpt.definitions
    full_path = os.path.join(path, filename)
    with open(full_path, "w") as md_file:
        md_file.write(content)
    logging.info(f"Information saved to {full_path}")
    return full_path
168,083 | import json
import sys
from solidgpt.definitions import *
def create_directories_if_not_exist(filepath: str):
    """Ensure the parent directory of *filepath* exists, creating it if needed.

    Fix: guard against a bare filename — ``os.path.dirname`` then returns
    ``""`` and ``os.makedirs("")`` would raise FileNotFoundError.
    """
    dir_name = os.path.dirname(filepath)
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return
def add_extension_if_not_exist(input_string, extension):
    """Append *extension* to *input_string* unless it already ends with it."""
    if not input_string.endswith(extension):
        return input_string + extension
    else:
        return input_string
def save_to_md2(filename, content: str) -> str:
    """Write *content* to *filename* (forcing a ``.md`` suffix); return the path."""
    create_directories_if_not_exist(filename)
    full_path = add_extension_if_not_exist(filename, ".md")
    with open(full_path, "w") as md_file:
        md_file.write(content)
        md_file.flush()
    logging.info(f"Information saved to {full_path}")
    return full_path
168,084 | import json
import sys
from solidgpt.definitions import *
def create_directories_if_not_exist(filepath: str):
    """Ensure the parent directory of *filepath* exists, creating it if needed.

    Fix: guard against a bare filename — ``os.path.dirname`` then returns
    ``""`` and ``os.makedirs("")`` would raise FileNotFoundError.
    """
    dir_name = os.path.dirname(filepath)
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return
def add_extension_if_not_exist(input_string, extension):
    """Append *extension* to *input_string* unless it already ends with it."""
    if not input_string.endswith(extension):
        return input_string + extension
    else:
        return input_string
def save_to_yaml(filename, content: str) -> str:
    """Write *content* to *filename* (forcing a ``.yaml`` suffix); return the path."""
    create_directories_if_not_exist(filename)
    full_path = add_extension_if_not_exist(filename, ".yaml")
    # UTF-8 is forced so non-ASCII YAML content round-trips on any platform.
    with open(full_path, "w", encoding='utf-8') as md_file:
        md_file.write(content)
        md_file.flush()
    logging.info(f"Information saved to {full_path}")
    return full_path
168,085 | import json
import sys
from solidgpt.definitions import *
def create_directories_if_not_exist(filepath: str):
    """Ensure the parent directory of *filepath* exists, creating it if needed.

    Fix: guard against a bare filename — ``os.path.dirname`` then returns
    ``""`` and ``os.makedirs("")`` would raise FileNotFoundError.
    """
    dir_name = os.path.dirname(filepath)
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return
def add_extension_if_not_exist(input_string, extension):
    """Append *extension* to *input_string* unless it already ends with it."""
    if not input_string.endswith(extension):
        return input_string + extension
    else:
        return input_string
def save_to_text(filename, content):
    """Write *content* to *filename* (forcing a ``.txt`` suffix); return the path."""
    create_directories_if_not_exist(filename)
    full_path = add_extension_if_not_exist(filename, ".txt")
    with open(full_path, "w", encoding='utf-8') as txt_file:
        txt_file.write(content)
    logging.info(f"Information saved to {full_path}")
    return full_path
168,086 | import json
import sys
from solidgpt.definitions import *
def add_extension_if_not_exist(input_string, extension):
    """Append *extension* to *input_string* unless it already ends with it.

    Fix: the original had ``load_from_text`` nested inside this function as
    its only statement, leaving this helper without a body and the loader
    unreachable at module level. Both are restored as top-level functions,
    matching the sibling definitions elsewhere in this module.
    """
    if not input_string.endswith(extension):
        return input_string + extension
    return input_string
def load_from_text(filename, path = "", extension = ".md") -> str:
    """Read and return the contents of ``path/filename`` with *extension*."""
    full_path = os.path.join(path, filename)
    full_path = add_extension_if_not_exist(full_path, extension)
    with open(full_path, "r") as md_file:
        content = md_file.read()
    logging.info(f"Information loaded from {full_path}")
    return content
168,087 | import json
import sys
from solidgpt.definitions import *
def same_string(s1: str, s2: str, case_sensitive: bool = False):
    """Compare two strings, ignoring case unless *case_sensitive* is True."""
    if not case_sensitive:
        s1, s2 = s1.lower(), s2.lower()
    return s1 == s2
168,088 | import json
import sys
from solidgpt.definitions import *
def print_error_message(message):
    """Write *message* to stderr with an ``Error:`` prefix."""
    sys.stderr.write(f"Error: {message}\n")
168,089 | import json
import sys
from solidgpt.definitions import *
def delete_directory_contents(directory):
    """Recursively delete every file and subdirectory inside *directory*.

    Walks bottom-up so each directory is empty before ``rmdir``; failures
    are reported per entry rather than aborting the whole sweep.
    """
    for root, dirs, files in os.walk(directory, topdown=False):
        for name in files:
            target = os.path.join(root, name)
            try:
                os.remove(target)
            except Exception as e:
                print(f"Error deleting file {target}: {str(e)}")
            else:
                print(f"Deleted file: {target}")
        for name in dirs:
            target = os.path.join(root, name)
            try:
                os.rmdir(target)
            except Exception as e:
                print(f"Error deleting directory {target}: {str(e)}")
            else:
                print(f"Deleted directory: {target}")
168,090 | import os
import re
import sys
import argparse
import webbrowser
from threading import Timer
from websvc.app import app
# Bind address and port of the embedded web service.
http_host = "0.0.0.0"
http_port = 5000
def open_browser() -> None:
    """Open the default browser at the local web service's root URL."""
    url = f"http://{http_host}:{http_port}/"
    webbrowser.open_new(url)
168,091 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `update_buttons` function. Write a Python function `def update_buttons(pair, value)` to solve the following problem:
show/hide control buttons
Here is the function:
def update_buttons(pair, value):
    """show/hide control buttons

    Refreshes the wrapper's cached data for *pair* and derives visibility
    from the stored ``margin`` field:
      - ``value == ""``: True when a margin value is present;
      - otherwise: True when the stored margin equals *value*.

    NOTE(review): the fallback returns the *string* ``"false"``, which is
    truthy in Python — confirm the consuming Dash property expects a
    string here rather than a boolean.
    """
    if pair is not None:
        tg_wrapper.helper.read_data(pair)
        if "margin" in tg_wrapper.helper.data:
            if value == "":
                return tg_wrapper.helper.data["margin"] != " "
            else:
                return tg_wrapper.helper.data["margin"] == value
    return "false"
168,092 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `btn_schedule_click` function. Write a Python function `def btn_schedule_click(add_click, remove_click, open_click)` to solve the following problem:
Add scanner/screen schedule
Here is the function:
def btn_schedule_click(add_click, remove_click, open_click):
    """Add scanner/screen schedule"""
    # Each branch returns (add_hidden, remove_hidden, add_clicks, remove_clicks).
    if add_click > 0:
        tg_wrapper._handler._check_scheduled_job()
        return True, False, 0, 0
    if remove_click > 0:
        tg_wrapper._handler._remove_scheduled_job()
        return False, True, 0, 0
    if open_click > 0:
        # On page open just reflect whether a schedule is already running.
        running = tg_wrapper.check_schedule_running()
        return (True, False, 0, 0) if running else (False, True, 0, 0)
168,093 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `btn_buy_click` function. Write a Python function `def btn_buy_click(click, market)` to solve the following problem:
Place a buy order
Here is the function:
def btn_buy_click(click, market):
    """Place a buy order"""
    clicked = click > 0
    if clicked:
        tg_wrapper.place_market_buy_order(market)
    # Empty label serves as the callback's placeholder output.
    return html.Label()
168,094 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `btn_sell_click` function. Write a Python function `def btn_sell_click(click, market)` to solve the following problem:
Place a sell order
Here is the function:
def btn_sell_click(click, market):
    """Place a sell order"""
    clicked = click > 0
    if clicked:
        tg_wrapper.place_market_sell_order(market)
    # Empty label serves as the callback's placeholder output.
    return html.Label()
168,095 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `btn_open_orders` function. Write a Python function `def btn_open_orders(click)` to solve the following problem:
restart pairs with open orders
Here is the function:
def btn_open_orders(click):
    """restart pairs with open orders"""
    clicked = click > 0
    if clicked:
        tg_wrapper.restart_open_order_pairs()
    return "true"
168,096 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
# Dash layout for the scanner "Options" panel: schedule management,
# scan-only / start-only buttons, and a combined scan-and-start button.
scan_layoutv2 = html.Div(
    [
        # Panel heading.
        dbc.Row(
            dbc.Col(
                html.Div(
                    html.H5("Options", style={"textAlign": "center"}),
                    className="d-grid gap-2",
                ),
                width={"size": 6, "offset": 3},
            )
        ),
        # Schedule management: Add starts hidden, Remove starts visible.
        dbc.Row(
            dbc.Col(
                [
                    html.Div(
                        html.Button(
                            "Add Schedule",
                            hidden=True,
                            id="btn-add-schedule",
                            n_clicks=0,
                            name="add",
                            className="btn btn-primary",
                        ),
                        className="d-grid gap-2",
                    ),
                    html.Div(
                        html.Button(
                            "Remove Schedule",
                            hidden=False,
                            id="btn-remove-schedule",
                            n_clicks=0,
                            name="remove",
                            className="btn btn-primary",
                        ),
                        className="d-grid gap-2",
                    ),
                ],
                md={"size": 12, "offset": 0},
                lg={"size": 10, "offset": 1},
            )
        ),
        html.P(),
        # Independent actions: scan only / start bots only.
        dbc.Row(
            [
                dbc.Col(
                    html.Div(
                        html.Button(
                            "Scan Only",
                            id="btn-scan-only",
                            n_clicks=0,
                            className="btn btn-primary",
                        ),
                        className="d-grid gap-2",
                    ),
                    md={"size": 6, "offset": 0},
                    lg={"size": 5, "offset": 1},
                ),
                html.Br(),
                dbc.Col(
                    html.Div(
                        html.Button(
                            "Start Bots Only",
                            id="btn-start-only",
                            n_clicks=0,
                            className="btn btn-primary",
                        ),
                        className="d-grid gap-2",
                    ),
                    md={"size": 6},
                    lg={"size": 5},
                ),
            ]
        ),
        html.P(),
        # Combined action: scan markets, then start the resulting bots.
        dbc.Row(
            dbc.Col(
                html.Div(
                    html.Button(
                        "Scan and Start Bots",
                        id="btn-scan-start",
                        n_clicks=0,
                        className="btn btn-primary",
                    ),
                    className="d-grid gap-2",
                ),
                md={"size": 12, "offset": 0},
                lg={"size": 10, "offset": 1},
            )
        ),
    ]
)
The provided code snippet includes necessary dependencies for implementing the `btn_start_scanning_click` function. Write a Python function `def btn_start_scanning_click(click)` to solve the following problem:
show scan options
Here is the function:
def btn_start_scanning_click(click):
    """show scan options"""
    # Reveal the scan options panel after the first click; otherwise emit
    # None (same as the implicit return of the original).
    return scan_layoutv2 if click > 0 else None
168,097 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
The provided code snippet includes necessary dependencies for implementing the `toggle_options_collapse` function. Write a Python function `def toggle_options_collapse(n, is_open)` to solve the following problem:
toggle scan option collapsible
Here is the function:
def toggle_options_collapse(n, is_open):
    """toggle scan option collapsible"""
    # Any click count flips the state; no clicks yet leaves it unchanged.
    return (not is_open) if n else is_open
168,098 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def start_scan_and_bots(clicks):
    """Scan markets and start the resulting bots once the button is pressed."""
    clicked = clicks > 0
    if clicked:
        tg_wrapper.start_market_scanning()
    return "true"
168,099 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def start_scan_only(clicks):
    """Run a market scan without starting bots once the button is pressed."""
    clicked = clicks > 0
    if clicked:
        tg_wrapper.start_market_scanning(True, False)
    return "true"
168,100 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def start_bots_only(clicks):
    """Start bots from the last scan without re-scanning once pressed."""
    clicked = clicks > 0
    if clicked:
        tg_wrapper.start_market_scanning(False, True)
    return "true"
168,101 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def btn_pause_click(click, market):
    """Pause the bot trading *market* once the button is pressed."""
    clicked = click > 0
    if clicked:
        tg_wrapper.pause_bot(market)
    return "true"
168,102 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def btn_resume_click(click, market):
    """Resume the paused bot trading *market* once the button is pressed."""
    clicked = click > 0
    if clicked:
        tg_wrapper.resume_bot(market)
    # Empty label serves as the callback's placeholder output.
    return html.Label()
168,103 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def btn_stop_click(click, market):
    """Stop the bot trading *market* once the button is pressed."""
    clicked = click > 0
    if clicked:
        tg_wrapper.stop_bot(market)
    # Empty label serves as the callback's placeholder output.
    return html.Label()
168,104 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `btn_start_click` function. Write a Python function `def btn_start_click(click, market)` to solve the following problem:
start bot manually
Here is the function:
def btn_start_click(click, market):
    """start bot manually"""
    clicked = click > 0
    if clicked:
        tg_wrapper.start_bot(market)
    # Empty label serves as the callback's placeholder output.
    return html.Label()
168,105 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
The provided code snippet includes necessary dependencies for implementing the `update_start_list` function. Write a Python function `def update_start_list(clicks)` to solve the following problem:
update manual start bot list
Here is the function:
def update_start_list(clicks):
    """update manual start bot list

    Build an accordion with a Start button for every configured market
    whose bot is not currently running.

    Fixes: the original called ``read_data()`` twice back-to-back and
    initialized a ``buttons`` list that was unconditionally replaced
    inside the loop; both redundancies are removed.
    """
    acc_list = []
    pair_count = 0
    tg_wrapper.helper.read_data()
    if "markets" in tg_wrapper.helper.data:
        for market in tg_wrapper.helper.data["markets"]:
            if tg_wrapper.helper.is_bot_running(market):
                continue
            start_button = dbc.Button(
                "Start",
                id={"type": "btn-start", "index": pair_count},
                n_clicks=0,
                value=market,
                className="btn btn-primary",
            )
            acc_list.append(
                dbc.AccordionItem(
                    [start_button],
                    title=f"{market} - stopped",
                    item_id=market,
                    class_name="justify-content-md-center",
                )
            )
            pair_count += 1
    accordion = html.Div(
        dbc.Accordion(id="start-bots", children=acc_list, start_collapsed=True),
        className="d-md-block",
    )
    return accordion
168,106 | import dash_bootstrap_components as dbc
from dash import dcc, html, Input, Output, State, MATCH, callback
from models.telegram import Wrapper
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.clean_data_folder()
def get_bot_status(pair):
    """Build the uptime/status/margin summary for an accordion heading.

    Returns None (implicitly, as before) when *pair* is None.
    """
    if pair is None:
        return None
    data = tg_wrapper.helper.data
    return (
        f"Uptime: {tg_wrapper.helper.get_uptime()}"
        f" - Status: {data['botcontrol']['status']}"
        f" - Margin: {data['margin']}"
    )
The provided code snippet includes necessary dependencies for implementing the `update_accordions` function. Write a Python function `def update_accordions(clicks)` to solve the following problem:
create bot accordions
Here is the function:
def update_accordions(clicks):
    """create bot accordions

    Build one accordion item per known bot with Stop/Pause/Resume and
    Buy/Sell controls appropriate to its current state.
    """
    acc_list = []
    pair_count = 0
    for pair in tg_wrapper.helper.get_all_bot_list():
        tg_wrapper.helper.read_data(pair)
        state = "defaulted"
        if "botcontrol" in tg_wrapper.helper.data:
            state = tg_wrapper.helper.data["botcontrol"]["status"]
        buttons = [
            dbc.Button(
                "Stop",
                id={"type": "btn-stop", "index": pair_count},
                n_clicks=0,
                value=pair,
                className="btn btn-primary",
            )
        ]
        if state not in ("paused", "stopped"):
            buttons.append(
                dbc.Button(
                    "Pause",
                    id={"type": "btn-pause", "index": pair_count},
                    n_clicks=0,
                    value=pair,
                    className="btn btn-primary",
                )
            )
        # Fix: the original tested `state in ("paused")` — membership in the
        # *string* "paused" (a substring test), not a one-element tuple.
        # Equality expresses the intended check.
        if state == "paused":
            buttons.append(
                dbc.Button(
                    "Resume",
                    id={"type": "btn-resume", "index": pair_count},
                    n_clicks=0,
                    value=pair,
                    className="btn btn-primary",
                )
            )
        # A blank margin marks no open position: offer Buy, otherwise Sell.
        if tg_wrapper.helper.data["margin"] == " ":
            buttons.append(
                dbc.Button(
                    "Buy",
                    id={"type": "btn-buy", "index": pair_count},
                    n_clicks=0,
                    value=pair,
                    className="btn btn-primary",
                )
            )
        else:
            buttons.append(
                dbc.Button(
                    "Sell",
                    id={"type": "btn-sell", "index": pair_count},
                    n_clicks=0,
                    value=pair,
                    className="btn btn-primary",
                )
            )
        acc_list.append(
            dbc.AccordionItem(
                buttons,
                title=f"{pair} - {get_bot_status(pair)}",
                item_id=pair,
                class_name="justify-content-md-center",
            )
        )
        pair_count += 1
    accordion = html.Div(
        dbc.Accordion(id="bots", children=acc_list, start_collapsed=True),
        className="d-grid gap-2",
    )
    return accordion
168,107 | import os
import dash_bootstrap_components as dbc
from dash import dcc, html, callback, Output, Input
def get_last_n_lines(file_name, N):
    """Return the last *N* lines of *file_name* as a list of strings.

    Lines are the newline-separated segments of the file, matching
    ``bytes.split(b"\\n")`` — so a trailing newline yields a final empty
    string, exactly as the previous implementation did.

    The tail is read backwards in fixed-size chunks, so large files are
    never loaded whole. Fix: the previous implementation decoded each
    line's bytes in *reverse* order, which raised UnicodeDecodeError on
    any multi-byte UTF-8 character; decoding whole forward segments
    handles non-ASCII content correctly.
    """
    CHUNK = 4096
    with open(file_name, "rb") as read_obj:
        # Determine the file size.
        read_obj.seek(0, os.SEEK_END)
        pos = read_obj.tell()
        if pos == 0:
            return []
        tail = b""
        # Pull chunks from the end until more than N newlines are buffered
        # (guaranteeing N complete lines) or the whole file has been read.
        while pos > 0 and tail.count(b"\n") <= N:
            step = min(CHUNK, pos)
            pos -= step
            read_obj.seek(pos)
            tail = read_obj.read(step) + tail
    lines = [segment.decode() for segment in tail.split(b"\n")]
    # When the loop stopped mid-file, lines[0] is a partial line; taking
    # the last N segments always excludes it (count > N => len >= N + 2).
    return lines[-N:]
The provided code snippet includes necessary dependencies for implementing the `read_log_file` function. Write a Python function `def read_log_file(n, active_tab)` to solve the following problem:
read log file updated
Here is the function:
def read_log_file(n, active_tab):
    """read log file updated"""
    content = html.Div()
    if active_tab is not None:
        # Render the last 1000 lines: the list repr is flattened into
        # newline-separated text for the textarea.
        raw = str(get_last_n_lines(active_tab, 1000))
        log_entries = (
            raw.replace("', '", "\n")
            .replace("['", "")
            .replace("']", "")
            .replace("\\r", "")
        )
        textarea = dcc.Textarea(
            value=log_entries,
            readOnly=True,
            style={
                "width": "100%",
                "background": "black",
                "color": "white",
                "height": "100%",
            },
            draggable=False,
            rows=20,
        )
        content = dbc.Card(dbc.CardBody(textarea))
    return content
168,108 | import os
import dash_bootstrap_components as dbc
from dash import dcc, html, callback, Output, Input
The provided code snippet includes necessary dependencies for implementing the `get_log_content` function. Write a Python function `def get_log_content(n)` to solve the following problem:
read log files add names to dropdown
Here is the function:
def get_log_content(n):
    """read log files add names to dropdown

    Scan the ``telegram_logs`` and ``logs`` directories (relative to the
    working directory) and return dropdown options ``{"label", "value"}``
    for each log file found, in sorted filename order.
    """
    logs = []
    for jfile in sorted(os.listdir(os.path.join("telegram_logs"))):
        logs.append({"label": jfile, "value": os.path.join("telegram_logs", jfile)})
    # Only *.log files are exposed from the generic logs directory.
    for jfile in sorted(os.listdir(os.path.join("logs"))):
        if ".log" in jfile:  # idiomatic membership test (was jfile.__contains__(".log"))
            logs.append({"label": jfile, "value": os.path.join("logs", jfile)})
    return logs
168,109 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `save_changes_buysize` function. Write a Python function `def save_changes_buysize( value, exchange, buysize, buymaxsize, buyminsize, preventloss, pl_trigger, pl_margin, tsl, tsl_trigger, tsl_margin, buynearhigh, buynearhigh_percent, sellatloss, nosellminpcnt, selllowerpcnt, )` to solve the following problem:
Save changes
Here is the function:
def save_changes_buysize(
    value,
    exchange,
    buysize,
    buymaxsize,
    buyminsize,
    preventloss,
    pl_trigger,
    pl_margin,
    tsl,
    tsl_trigger,
    tsl_margin,
    buynearhigh,
    buynearhigh_percent,
    sellatloss,
    buynearhigh_sell_placeholder=None,
):
    """Save changes"""
168,110 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `sellatloss_switch` function. Write a Python function `def sellatloss_switch(value, exchange)` to solve the following problem:
enable/disable buy size amount
Here is the function:
def sellatloss_switch(value, exchange):
    """enable/disable buy size amount"""
    cfg = tg_wrapper.helper.config[exchange]["config"]
    # Default to disabled, then enable when the checkbox list contains it.
    cfg.update({"sellatloss": 0})
    if "sellatloss" in value:
        cfg.update({"sellatloss": 1})
        return False, False
    return True, True
168,111 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
The provided code snippet includes necessary dependencies for implementing the `buy_near_high_switch` function. Write a Python function `def buy_near_high_switch(value)` to solve the following problem:
enable/disable buy size amount
Here is the function:
def buy_near_high_switch(value):
    """enable/disable buy size amount"""
    # The input is enabled (disabled=False) only when the
    # "disablebuynearhigh" option is ticked.
    return "disablebuynearhigh" not in value
168,112 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
The provided code snippet includes necessary dependencies for implementing the `buy_size_switch` function. Write a Python function `def buy_size_switch(value)` to solve the following problem:
enable/disable buy size amount
Here is the function:
def buy_size_switch(value):
    """enable/disable buy size amount"""
    # Both size inputs (min and max) share the same disabled flag.
    disabled = "buysize" not in value
    return disabled, disabled
168,113 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `prevent_loss_switch` function. Write a Python function `def prevent_loss_switch(value, exchange)` to solve the following problem:
enable/disable prevent loss settings
Here is the function:
def prevent_loss_switch(value, exchange):
    """Persist the prevent-loss flag for *exchange* and toggle its inputs.

    Writes ``preventloss`` as 1/0 into the in-memory exchange config and
    returns the disabled-state for the two related inputs:
    (False, False) when prevent loss is selected, (True, True) otherwise.
    """
    selected = "preventloss" in value
    tg_wrapper.helper.config[exchange]["config"]["preventloss"] = 1 if selected else 0
    return (False, False) if selected else (True, True)
168,114 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
The provided code snippet includes necessary dependencies for implementing the `trailing_stop_loss_switch` function. Write a Python function `def trailing_stop_loss_switch(value)` to solve the following problem:
enable/disable trailing stop loss settings
Here is the function:
def trailing_stop_loss_switch(value):
    """Toggle the trailing-stop-loss inputs.

    Returns (False, False) — inputs enabled — when "trailingstoploss" is
    selected in *value*, otherwise (True, True) to disable them.
    """
    selected = "trailingstoploss" in value
    return (False, False) if selected else (True, True)
168,115 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `exchange_selector` function. Write a Python function `def exchange_selector(value)` to solve the following problem:
Select Exchange
Here is the function:
def exchange_selector(value):
    """Reload the config and collect the enabled options for exchange *value*.

    Registers the exchange with an empty config if it is unknown.  An option
    counts as enabled when its flag is 1; "trailingstoploss" and "buysize"
    are synthesised from the presence of their related keys.  The same list
    is returned eight times, once per checklist output.
    """
    enabled = []
    tg_wrapper.helper.read_config()
    if value is not None:
        tg_wrapper.helper.config.setdefault(value, {"config": {}})
        cfg = tg_wrapper.helper.config[value]["config"]
        enabled = [name for name, flag in cfg.items() if flag == 1]
        if "trailingstoplosstrigger" in cfg and "trailingstoploss" in cfg:
            enabled.append("trailingstoploss")
        if "buymaxsize" in cfg or "buyminsize" in cfg:
            enabled.append("buysize")
    return (enabled,) * 8
168,116 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
def sellatloss(value):
    """Return (nosellminpcnt, selllowerpcnt) for exchange *value*.

    Both default to 0 when the exchange or the keys are absent.
    """
    no_sell_min = 0
    sell_lower = 0
    if value is not None and value in tg_wrapper.helper.config:
        cfg = tg_wrapper.helper.config[value]["config"]
        no_sell_min = cfg.get("nosellminpcnt", 0)
        sell_lower = cfg.get("selllowerpcnt", 0)
    return no_sell_min, sell_lower
168,117 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
def buy_near_high(value):
    """Return the configured nobuynearhighpcnt for exchange *value* (0 when unset)."""
    pcnt = 0
    if value is not None and value in tg_wrapper.helper.config:
        pcnt = tg_wrapper.helper.config[value]["config"].get("nobuynearhighpcnt", 0)
    return pcnt
168,118 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
def buy_max_size(value):
    """Return the configured buymaxsize for exchange *value* (0 when unset)."""
    size = 0
    if value is not None and value in tg_wrapper.helper.config:
        size = tg_wrapper.helper.config[value]["config"].get("buymaxsize", 0)
    return size
168,119 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
def buy_min_size(value):
    """Return the configured buyminsize for exchange *value* (0 when unset)."""
    size = 0
    if value is not None and value in tg_wrapper.helper.config:
        size = tg_wrapper.helper.config[value]["config"].get("buyminsize", 0)
    return size
168,120 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
class Granularity(Enum):
    """Candle granularity, addressable as seconds, short/medium strings or a
    pandas resample frequency (e.g. 3600 / "1h" / "1hour" / "1H")."""

    ONE_MINUTE = 60, "1m", "1min", "1T"
    FIVE_MINUTES = 300, "5m", "5min", "5T"
    FIFTEEN_MINUTES = 900, "15m", "15min", "15T"
    THIRTY_MINUTES = 1800, "30m", "30min", "30T"
    ONE_HOUR = 3600, "1h", "1hour", "1H"
    SIX_HOURS = 21600, "6h", "6hour", "6H"
    ONE_DAY = 86400, "1d", "1day", "1D"

    def __init__(self, integer, short, medium, frequency):
        # expose each representation of the member by name
        self.integer = integer
        self.short = short
        self.medium = medium
        self.frequency = frequency

    def convert_to_enum(value):
        """Map any known representation of a granularity onto its member."""
        for member in Granularity:
            # member.value is the full tuple of representations
            if value in member.value:
                return member
        raise ValueError("Invalid Granularity")

    def to_short(self):
        return self.short

    def to_integer(self):
        return self.integer

    def to_medium(self):
        return self.medium

    def get_frequency(self):
        return self.frequency
The provided code snippet includes necessary dependencies for implementing the `granularity_selector` function. Write a Python function `def granularity_selector(value)` to solve the following problem:
read granularity from config
Here is the function:
def granularity_selector(value):
    """Read the configured granularity for exchange *value*.

    Returns the granularity in integer seconds, or the sentinel string
    "ss" when no granularity is configured (smart-switch mode).
    """
    granularity = "ss"
    if value is not None:
        if value in tg_wrapper.helper.config:
            if "granularity" in tg_wrapper.helper.config[value]["config"]:
                # Normalise whatever form is stored ("1h", 3600, "1hour", ...)
                # via the enum.  BUGFIX: `to_integer` is a method — the
                # original omitted the call parentheses and returned the
                # bound method object instead of the integer.
                granularity = Granularity.Granularity.convert_to_enum(
                    tg_wrapper.helper.config[value]["config"]["granularity"]
                ).to_integer()
    return granularity
168,121 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `trailing_stop_loss_trigger` function. Write a Python function `def trailing_stop_loss_trigger(value)` to solve the following problem:
read trailingstoplosstrigger from config
Here is the function:
def trailing_stop_loss_trigger(value):
    """Return (trailingstoplosstrigger, trailingstoploss) for *value*.

    Both default to 0 when the exchange or the keys are absent.
    """
    trigger = 0
    margin = 0
    if value is not None and value in tg_wrapper.helper.config:
        cfg = tg_wrapper.helper.config[value]["config"]
        trigger = cfg.get("trailingstoplosstrigger", 0)
        margin = cfg.get("trailingstoploss", 0)
    return trigger, margin
168,122 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `prevent_loss_trigger` function. Write a Python function `def prevent_loss_trigger(value)` to solve the following problem:
read preventloss from config
Here is the function:
def prevent_loss_trigger(value):
    """Return (preventlosstrigger, preventlossmargin) for *value*.

    Both default to 0 when the exchange or the keys are absent.
    """
    trigger = 0
    margin = 0
    if value is not None and value in tg_wrapper.helper.config:
        cfg = tg_wrapper.helper.config[value]["config"]
        trigger = cfg.get("preventlosstrigger", 0)
        margin = cfg.get("preventlossmargin", 0)
    return trigger, margin
168,123 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
The provided code snippet includes necessary dependencies for implementing the `switched` function. Write a Python function `def switched( enabled_list, sell_list, options_list, exchange, options, sell_options, extras_options, )` to solve the following problem:
Make config changes
Here is the function:
def switched(
    enabled_list,
    sell_list,
    options_list,
    exchange,
    options,
    sell_options,
    extras_options,
):
    """Persist checklist changes for *exchange* into the in-memory config.

    Each of the three (available options, selected values) pairs is applied
    as a 1/0 flag on the exchange's config.  Always returns True.
    """

    def _apply(available, selected, config):
        # flag every available option on/off according to the selection
        for option in available:
            config[option["value"]] = 1 if option["value"] in selected else 0

    if exchange is not None:
        # BUGFIX: the original built a wrongly-nested dict
        # ({exchange: {"config": {}}}) for a previously unknown exchange and
        # then raised KeyError on config_list["config"]; register the
        # exchange properly instead.
        exchange_entry = tg_wrapper.helper.config.setdefault(exchange, {"config": {}})
        config = exchange_entry.setdefault("config", {})
        _apply(options, enabled_list, config)
        _apply(sell_options, sell_list, config)
        _apply(extras_options, options_list, config)
        print(tg_wrapper.helper.config[exchange]["config"])
    return True
168,124 | from dash import dcc, html, Input, Output, callback, State
import dash_bootstrap_components as dbc
from models.telegram import Wrapper
from models.exchange import Granularity
tg_wrapper = Wrapper("config.json", "webgui")
tg_wrapper.helper.read_config()
class Granularity(Enum):
    """Candle granularity; each member carries four representations:
    integer seconds, short ("1h"), medium ("1hour") and a pandas
    resample frequency ("1H")."""

    ONE_MINUTE = 60, "1m", "1min", "1T"
    FIVE_MINUTES = 300, "5m", "5min", "5T"
    FIFTEEN_MINUTES = 900, "15m", "15min", "15T"
    THIRTY_MINUTES = 1800, "30m", "30min", "30T"
    ONE_HOUR = 3600, "1h", "1hour", "1H"
    SIX_HOURS = 21600, "6h", "6hour", "6H"
    ONE_DAY = 86400, "1d", "1day", "1D"

    def __init__(self, integer, short, medium, frequency):
        # unpack the value tuple into named attributes
        self.integer = integer
        self.short = short
        self.medium = medium
        self.frequency = frequency

    def convert_to_enum(value):
        """Map any of the four representations back onto its member."""
        for granularity in Granularity:
            # granularity.value is the whole tuple of representations
            for enum_value in granularity.value:
                if enum_value == value:
                    return granularity
        raise ValueError("Invalid Granularity")

    def to_short(self):
        return self.short

    def to_integer(self):
        return self.integer

    def to_medium(self):
        return self.medium

    def get_frequency(self):
        return self.frequency
The provided code snippet includes necessary dependencies for implementing the `switch_granularity` function. Write a Python function `def switch_granularity(gran, exchange)` to solve the following problem:
Set exchange granularity
Here is the function:
def switch_granularity(gran, exchange):
    """Set (or clear) the configured granularity for *exchange*.

    The sentinel "ss" clears the setting (smart-switch mode); otherwise
    the value is stored in the representation each exchange expects:
    integer seconds (coinbase / coinbasepro), medium form (binance) or
    short form (kucoin).  Always returns True.
    """
    if exchange is not None and gran != "":
        config = tg_wrapper.helper.config[exchange]["config"]
        if gran == "ss":
            # pop with a default so clearing an already-clear config
            # cannot raise KeyError
            config.pop("granularity", None)
        else:
            gran = Granularity.Granularity.convert_to_enum(gran)
            # BUGFIX: the accessors are methods; the original omitted the
            # call parentheses and stored the bound method object instead
            # of the granularity value.
            if exchange in ("coinbase", "coinbasepro"):
                config["granularity"] = gran.to_integer()
            elif exchange == "binance":
                config["granularity"] = gran.to_medium()
            elif exchange == "kucoin":
                config["granularity"] = gran.to_short()
    return True
168,125 | import os
import re
import sys
import argparse
import webbrowser
from threading import Timer
from logsvc.app import app
http_host = "0.0.0.0"
http_port = 5000
def open_browser() -> None:
    """Open the default browser on the log-service URL (module-level host/port)."""
    url = f"http://{http_host}:{http_port}/"
    webbrowser.open_new(url)
168,126 | import re
import sys
import datetime
from models.Trading import TechnicalAnalysis
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
def header() -> str:
    """Return the static HTML head and opening <body> markup for the web portal."""
    return """
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<script type="text/javascript" src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
<script type="text/javascript" src="https://cdn.datatables.net/1.11.0/js/jquery.dataTables.min.js"></script>
<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.11.0/css/jquery.dataTables.min.css">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KyZXEAg3QhqLMpG8r+8fhAXLRk2vvoC2f3B09zVXn8CA5QIVfZOJ3BCsw2P0p/We" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/js/bootstrap.bundle.min.js" integrity="sha384-U1DAWAznBHeqEIlVSCgzq+c9gqGAJn5c/t99JyeKa9xxaYpSvHU5awsuZVVFIhvj" crossorigin="anonymous"></script>
<script type="text/javascript" src="js/app.js"></script>
<script type="text/css" src="css/app.css"></script>
<title>PyCryptoBot Web Portal</title>
</head>
<body>
"""
168,127 | import re
import sys
import datetime
from models.Trading import TechnicalAnalysis
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
def footer() -> str:
    """Return the closing </body></html> markup for the web portal."""
    return """
</body>
</html>
"""
168,128 | import re
import sys
import datetime
from models.Trading import TechnicalAnalysis
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
def is_binance_market_valid(market: str) -> bool:
    """True when *market* looks like a Binance symbol: 5-12 uppercase letters/digits."""
    pattern = re.compile(r"^[A-Z0-9]{5,12}$")
    return pattern.match(market) is not None
168,129 | import re
import sys
import datetime
from models.Trading import TechnicalAnalysis
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
def is_coinbase_market_valid(market: str) -> bool:
    """True when *market* looks like a Coinbase pair such as "BTC-USD"."""
    pattern = re.compile(r"^[0-9A-Z]{1,20}\-[1-9A-Z]{2,5}$")
    return pattern.match(market) is not None
168,130 | import os
import sys
import time
import signal
from models.exchange.coinbase_pro import WebSocketClient as CWebSocketClient
from models.exchange.Granularity import Granularity
def cls():
    """Clear the terminal ("cls" on Windows, "clear" elsewhere)."""
    command = "cls" if os.name == "nt" else "clear"
    os.system(command)
168,131 | import os
import sys
import time
import signal
from models.exchange.coinbase_pro import WebSocketClient as CWebSocketClient
from models.exchange.Granularity import Granularity
def handler(signum, frame):
    """SIGINT handler: report that shutdown is still in progress; ignore other signals."""
    if signum != 2:
        return
    print(" -> not finished yet!")
168,132 | import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from controllers.PyCryptoBot import PyCryptoBot
from models.exchange.ExchangesEnum import Exchange
from models.exchange.Granularity import Granularity
from models.Trading import TechnicalAnalysis
upper_band = ma + 2 * std
lower_band = ma - 2 * std
buy_signals, sell_signals = trade(data)
def trade(data):
    """Collect row indices where the close crosses the Bollinger bands.

    A cross above the module-level ``upper_band`` yields a buy signal, a
    cross below ``lower_band`` yields a sell signal.
    NOTE(review): buying on an upper-band breakout is a momentum rule,
    not the usual mean-reversion one — confirm intent.
    """
    buy_signals = []
    sell_signals = []
    closes = data["close"]
    for i in range(1, len(data)):
        if closes[i] > upper_band[i] and closes[i - 1] < upper_band[i - 1]:
            buy_signals.append(i)
        elif closes[i] < lower_band[i] and closes[i - 1] > lower_band[i - 1]:
            sell_signals.append(i)
    return buy_signals, sell_signals
168,133 | import os
import sys
import time
import signal
from models.exchange.binance import WebSocketClient as BWebSocketClient
from models.exchange.Granularity import Granularity
def cls():
    """Clear the terminal screen ("cls" on Windows, "clear" elsewhere)."""
    os.system("cls" if os.name == "nt" else "clear")
168,134 | import os
import sys
import time
import signal
from models.exchange.binance import WebSocketClient as BWebSocketClient
from models.exchange.Granularity import Granularity
def signal_handler(signum, frame):
    """On SIGINT (signal 2), report that shutdown has not completed yet."""
    if signum == 2:
        print(" -> not finished yet!")
        return
168,135 | import os
import sys
import time
import signal
from models.exchange.kucoin import WebSocketClient as KWebSocketClient
from models.exchange.Granularity import Granularity
def cls():
    """Clear the terminal screen ("cls" on Windows, "clear" elsewhere)."""
    os.system("cls" if os.name == "nt" else "clear")
168,136 | import os
import sys
import time
import signal
from models.exchange.kucoin import WebSocketClient as KWebSocketClient
from models.exchange.Granularity import Granularity
def signal_handler(signum, frame):
    """On SIGINT (signal 2), report that shutdown has not completed yet."""
    if signum == 2:
        print(" -> not finished yet!")
        return
168,138 | import os
import sys
import time
import signal
from models.exchange.coinbase_pro import WebSocketClient as CWebSocketClient
from models.exchange.Granularity import Granularity
def signal_handler(signum, frame):
    """On SIGINT (signal 2), report that shutdown has not completed yet."""
    if signum == 2:
        print(" -> not finished yet!")
        return
168,139 | import os
import sys
import time
import signal
from models.exchange.binance import WebSocketClient as BWebSocketClient
def cls():
    """Clear the terminal screen ("cls" on Windows, "clear" elsewhere)."""
    os.system("cls" if os.name == "nt" else "clear")
168,140 | import os
import sys
import time
import signal
from models.exchange.binance import WebSocketClient as BWebSocketClient
def handler(signum, frame):
    """On SIGINT (signal 2), report that shutdown has not completed yet."""
    if signum == 2:
        print(" -> not finished yet!")
        return
168,142 | import os
import sys
import time
import signal
from models.exchange.kucoin import WebSocketClient as KWebSocketClient
from models.exchange.Granularity import Granularity
def handler(signum, frame):
    """On SIGINT (signal 2), report that shutdown has not completed yet."""
    if signum == 2:
        print(" -> not finished yet!")
        return
168,143 | import os
import sys
import time
import json
import random
import sched
import signal
import functools
import pandas as pd
import numpy as np
from regex import R
from rich.console import Console
from rich.table import Table
from rich.text import Text
from rich import box
from datetime import datetime, timedelta
from os.path import exists as file_exists
from urllib3.exceptions import ReadTimeoutError
from models.BotConfig import BotConfig
from models.exchange.ExchangesEnum import Exchange
from models.exchange.Granularity import Granularity
from models.exchange.coinbase_pro import WebSocketClient as CWebSocketClient
from models.exchange.coinbase_pro import AuthAPI as CAuthAPI, PublicAPI as CPublicAPI
from models.exchange.kucoin import AuthAPI as KAuthAPI, PublicAPI as KPublicAPI
from models.exchange.kucoin import WebSocketClient as KWebSocketClient
from models.exchange.binance import AuthAPI as BAuthAPI, PublicAPI as BPublicAPI
from models.exchange.binance import WebSocketClient as BWebSocketClient
from models.exchange.coinbase import AuthAPI as CBAuthAPI
from models.exchange.coinbase import WebSocketClient as CBWebSocketClient
from models.helper.TelegramBotHelper import TelegramBotHelper
from models.helper.MarginHelper import calculate_margin
from models.TradingAccount import TradingAccount
from models.Stats import Stats
from models.AppState import AppState
from models.helper.TextBoxHelper import TextBox
from models.Strategy import Strategy
from views.TradingGraphs import TradingGraphs
from views.PyCryptoBot import RichText
from utils.PyCryptoBot import truncate as _truncate
from utils.PyCryptoBot import compare as _compare
def signal_handler(signum):
    """On SIGINT (signal 2), ask the user to wait while websocket threads close."""
    if signum == 2:
        print("Please be patient while websockets terminate!")
        return
168,144 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
dashboard_layout = html.Div(
children=[
dbc.Row(
dbc.Col(
[
html.H4("Dashboard", style={"textAlign": "left"}),
],
width={"size": 1},
),
),
html.Br(),
dbc.Row(
[
dbc.Col(
[
dash_table.DataTable(
id="table-paging-and-sorting",
page_action="native",
# move below table
css=[{"selector": ".show-hide", "rule": "display: none"}],
page_current=0,
page_size=15,
sort_action="native",
style_cell={
"text_align": "center",
"font_size": "14px",
"font_family": "Arial",
},
style_as_list_view=True,
style_header={
"textAlign": "center",
"backgroundColor": "rgb(30, 30, 30)",
"color": "white",
"fontWeight": "bold",
"font_size": "14px",
},
columns=[
{"name": "Uptime", "id": "Uptime", "type": "text"},
{"name": "Pair", "id": "Trading Pair", "type": "text"},
{"name": "Exchange", "id": "Exchange", "type": "text"},
{
"name": "Action",
"id": "Action",
"type": "numeric",
},
{
"name": "Price",
"id": "Current self.price",
"type": "numeric",
},
dict(
id="Margin",
name="Margin",
type="numeric",
format=percentage,
),
{"name": "TSLT", "id": "TSLT", "type": "text"},
{"name": "PVLT", "id": "PVLT", "type": "text"},
dict(
id="From DF High",
name="From DF High",
type="numeric",
format=percentage,
),
{"name": "DF High", "id": "DF High", "type": "numeric"},
{"name": "Delta", "id": "Delta", "type": "numeric"},
{"name": "BULL", "id": "BULL", "type": "text"},
{"name": "ERI", "id": "ERI", "type": "text"},
{"name": "EMA", "id": "EMA", "type": "text"},
{"name": "MACD", "id": "MACD", "type": "text"},
{"name": "OBV", "id": "OBV", "type": "text"},
],
style_data={
"backgroundColor": "rgb(50, 50, 50)",
"color": "white",
},
style_data_conditional=[
{
"if": {"row_index": "odd"},
"backgroundColor": "rgb(70, 70, 70)",
},
# set column widths
{"if": {"column_id": "Trading Pair"}, "width": "180px"},
{"if": {"column_id": "Action"}, "width": "130px"},
{
"if": {"column_id": "Current self.price"},
"width": "160px",
},
{"if": {"column_id": "Margin"}, "width": "160px"},
{"if": {"column_id": "TSLT"}, "width": "80px"},
{"if": {"column_id": "PVLT"}, "width": "80px"},
{"if": {"column_id": "From DF High"}, "width": "130px"},
{"if": {"column_id": "DF High"}, "width": "130px"},
{"if": {"column_id": "BULL"}, "width": "80px"},
{"if": {"column_id": "ERI"}, "width": "80px"},
{"if": {"column_id": "EMA"}, "width": "80px"},
{"if": {"column_id": "MACD"}, "width": "80px"},
{"if": {"column_id": "OBV"}, "width": "80px"},
# indicator states
# add gradients for from_df_hi and margins to represent position, when from df high is > 0 make df hi green
{
"if": {
"filter_query": "{Margin} > 0",
"column_id": "Margin",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": "{Margin} < 0",
"column_id": "Margin",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": "{From DF High} > 0",
"column_id": "From DF High",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": "{From DF High} < 0",
"column_id": "From DF High",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": '{TSLT} = "True"',
"column_id": "TSLT",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{PVLT} = "True"',
"column_id": "PVLT",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{BULL} = "True"',
"column_id": "BULL",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{BULL} = "False"',
"column_id": "BULL",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": '{ERI} = "True"',
"column_id": "ERI",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{ERI} = "False"',
"column_id": "ERI",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": '{EMA} = "True"',
"column_id": "EMA",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{EMA} = "False"',
"column_id": "EMA",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": '{MACD} = "True"',
"column_id": "MACD",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{MACD} = "False"',
"column_id": "MACD",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": '{OBV} = "True"',
"column_id": "OBV",
},
"backgroundColor": "#3D9970",
"color": "white",
},
{
"if": {
"filter_query": '{OBV} = "False"',
"column_id": "OBV",
},
"backgroundColor": "#99413d",
"color": "white",
},
{
"if": {
"filter_query": "{Action} != SELL",
"column_id": "Action",
},
"backgroundColor": "#3D9970",
"color": "white",
},
],
),
],
),
]
),
# update interval
dcc.Interval(id="interval-container", interval=10000, n_intervals=0),
html.P(),
# graphs
dbc.Row(
[
# margin graph
dbc.Col(
[
# html.Div(id='margin-current'),
daq.Gauge(
label="Current Margins",
id="margin-current",
color={
"gradient": True,
"ranges": {
"#99413d": [-35, -20],
"#F1C232": [-20, 20],
"#3D9970": [20, 35],
},
},
value=0,
max=35,
min=-35,
size=160,
)
]
),
dbc.Col(
[
daq.Gauge(
label="7 Day Margins",
id="margin-7Dtotal",
color={
"gradient": True,
"ranges": {
"#99413d": [-100, -20],
"#F1C232": [-20, 20],
"#3D9970": [20, 100],
},
},
value=0,
max=100,
min=-100,
size=160,
)
]
),
]
),
dbc.Row(
[
dbc.Col(
[
html.H5("Margin", style={"textAlign": "center"}),
html.Div(id="margin-graph"),
],
lg=5,
xl=5,
),
# df high graph
dbc.Col(
[
html.H5("From DF High", style={"textAlign": "center"}),
html.Div(id="from-df-high"),
],
lg=5,
xl=5,
),
],
justify="evenly",
),
]
)
The provided code snippet includes necessary dependencies for implementing the `display_page` function. Write a Python function `def display_page(pathname)` to solve the following problem:
page navigation
Here is the function:
def display_page(pathname):
    """Route a URL path to its page layout, falling back to the dashboard."""
    routes = {
        "/controls": controls.layout,
        "/config": config.layout,
        "/terminals": terminals.layout,
        "/telegramconfig": telegramconfig.layout,
    }
    return routes.get(pathname, dashboard_layout)
168,145 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
tg_wrapper = controls.tg_wrapper
df = []
def get_date_from_iso8601_str(date: str):  # pylint: disable=invalid-name
    """Format the elapsed time since *date* (ISO-8601-ish string) as "Xh Ym"."""

    def _normalise(stamp: str) -> str:
        # drop fractional seconds and use a space separator
        stamp = stamp.split(".")[0]
        stamp = stamp.replace("T", " ")
        # a bare date (YYYY-MM-DD) gets a midnight time component
        if len(stamp) == 10:
            stamp = f"{stamp} 00:00:00"
        return stamp

    started = datetime.strptime(_normalise(date), "%Y-%m-%d %H:%M:%S")
    now = datetime.strptime(_normalise(str(datetime.now())), "%Y-%m-%d %H:%M:%S")
    elapsed = (now - started).total_seconds()
    hours = divmod(elapsed, 3600)[0]
    minutes = divmod(elapsed - 3600 * hours, 60)[0]
    return f"{round(hours)}h {round(minutes)}m"
Output("table-paging-and-sorting", "data"),
Input("interval-container", "n_intervals"),
The provided code snippet includes necessary dependencies for implementing the `update_table` function. Write a Python function `def update_table(n)` to solve the following problem:
Update all data
Here is the function:
def update_table(n):
    """Rebuild the dashboard table from every active bot's telegram_data file.

    Fired on each interval tick (*n* is the unused tick counter).  Reads one
    JSON file per trading pair, flattens it and returns a list of row dicts
    for the DataTable.  Files that fail to parse are reported and skipped.
    """
    pairs_list = tg_wrapper.helper.get_active_bot_list()
    df = pd.DataFrame(
        columns=[
            "Uptime",
            "Trading Pair",
            "Exchange",
            "Action",
            "Current self.price",
            "From DF High",
            "DF High",
            "Margin",
            "Delta",
            "TSLT",
            "PVLT",
            "ERI",
            "BULL",
            "EMA",
            "MACD",
            "OBV",
        ],
    )
    for pair in pairs_list:
        # skip the bookkeeping files that live alongside the per-pair data
        if (
            "data.json" not in pair
            and "output.json" not in pair
            and "settings.json" not in pair
        ):
            try:
                with open(
                    os.path.join(
                        tg_wrapper.helper.datafolder, "telegram_data", f"{pair}.json"
                    ),
                    encoding="utf8",
                ) as f:
                    json_data = pd.json_normalize(json.loads(f.read()))
                json_data["pair"] = pair
                uptime = get_date_from_iso8601_str(json_data["botcontrol.started"][0])
                # BUGFIX: default the bar colour; the original left
                # `margincolor` unassigned (NameError) whenever neither
                # margin nor from_df_high was a percentage string.
                margincolor = "#3D9970"
                if (
                    isinstance(json_data["margin"][0], str)
                    and "%" in json_data["margin"][0]
                    and "-" in json_data["margin"][0]
                ):
                    margincolor = "#99413d"
                elif (
                    isinstance(json_data["margin"][0], str)
                    and "%" in json_data["margin"][0]
                    and "-" not in json_data["margin"][0]
                ):
                    margincolor = "#3D9970"
                elif (
                    isinstance(json_data["from_df_high"][0], str)
                    and "%" in json_data["from_df_high"][0]
                    and "-" in json_data["from_df_high"][0]
                ):
                    margincolor = "#99413d"
                elif (
                    isinstance(json_data["from_df_high"][0], str)
                    and "%" in json_data["from_df_high"][0]
                    and "-" not in json_data["from_df_high"][0]
                ):
                    margincolor = "#3D9970"
                data = pd.DataFrame(
                    {
                        "Uptime": uptime,
                        "Trading Pair": json_data["pair"],
                        "Exchange": json_data["exchange"],
                        "Action": json_data["signal"],
                        "Current self.price": json_data["price"],
                        "Margin": json_data["margin"]
                        if "margin" in json_data and json_data["margin"][0] != " "
                        else "NaN",
                        "TSLT": json_data["trailingstoplosstriggered"]
                        if "trailingstoplosstriggered" in json_data
                        else "",
                        "PVLT": json_data["preventlosstriggered"]
                        if "preventlosstriggered" in json_data
                        else "",
                        "From DF High": json_data["from_df_high"]
                        if "from_df_high" in json_data
                        and json_data["from_df_high"][0] != " "
                        else "NaN",
                        "DF High": json_data["df_high"]
                        if "df_high" in json_data
                        else "",
                        "BULL": json_data["indicators.BULL"]
                        if "indicators.BULL" in json_data
                        else "",
                        "ERI": json_data["indicators.ERI"]
                        if "indicators.ERI" in json_data
                        else "",
                        "EMA": json_data["indicators.EMA"]
                        if "indicators.EMA" in json_data
                        else "",
                        "MACD": json_data["indicators.MACD"]
                        if "indicators.MACD" in json_data
                        else "",
                        "OBV": json_data["indicators.OBV"]
                        if "indicators.OBV" in json_data
                        else "",
                        "Margincolor": margincolor,
                    }
                )
                df = pd.concat([df, data])
            except KeyError as err:
                # a half-written data file is missing an expected key
                print(f"update_table: missing key {err} in {pair}.json")
            except Exception as err:
                print(err)
    # coerce columns so the DataTable's conditional formatting works
    if len(pairs_list) > 0:
        # NOTE(review): rstrip assumes Margin/From DF High are strings here —
        # a numeric value in the JSON would raise; confirm upstream format.
        df["Margin"] = df["Margin"].map(lambda x: x.rstrip("%"))
        df["Margin"] = df["Margin"].fillna(0)
        df["Margin"] = df["Margin"].astype(float, errors="ignore")
        df["Margin"] = df["Margin"] * 0.01
        df["From DF High"] = df["From DF High"].map(lambda x: x.rstrip("%"))
        df["From DF High"] = df["From DF High"].fillna(0)
        df["From DF High"] = df["From DF High"].astype(float, errors="ignore")
        df["From DF High"] = df["From DF High"] * 0.01
        df["TSLT"] = df["TSLT"].astype(str)
        df["PVLT"] = df["PVLT"].astype(str)
        df["BULL"] = df["BULL"].astype(str)
        df["ERI"] = df["ERI"].astype(str)
        df["EMA"] = df["EMA"].astype(str)
        df["MACD"] = df["MACD"].astype(str)
        df["OBV"] = df["OBV"].astype(str)
        df = df.sort_values(by="Action", ascending=[True], inplace=False)
    return df.to_dict(orient="records")
168,146 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
df = []
dff = []
The provided code snippet includes necessary dependencies for implementing the `update_graphs` function. Write a Python function `def update_graphs(rows, derived_virtual_selected_rows)` to solve the following problem:
Update graphs
Here is the function:
def update_graphs(rows, derived_virtual_selected_rows):
    """Update the Margin bar graph from the (filtered) DataTable rows.

    Args:
        rows: `derived_virtual_data` from the DataTable (list of row dicts),
            or None before the table has rendered.
        derived_virtual_selected_rows: indices of user-selected rows, or None.

    Returns:
        A list containing a single dcc.Graph, or an empty list when the
        "Margin" column is absent from the data.
    """
    if derived_virtual_selected_rows is None:
        derived_virtual_selected_rows = []

    # Fall back to the module-level df until the table supplies its rows.
    dff = df if rows is None else pd.DataFrame(rows)
    dff["Margin"] = dff["Margin"] * 100  # fraction -> percent for display
    dff["From DF High"] = dff["From DF High"] * 100

    # One colour per bar: white for selected rows, otherwise that row's own
    # pre-computed Margincolor value.  (Previously the whole Margincolor
    # Series was stored in every slot and only colors[0] was passed to the
    # figure, so per-row selection highlighting never worked.)
    colors = [
        "white"
        if i in derived_virtual_selected_rows
        else dff["Margincolor"].iloc[i]
        for i in range(len(dff))
    ]

    return [
        dcc.Graph(
            id="Margin",
            figure={
                "data": [
                    {
                        "x": dff["Trading Pair"],
                        "y": dff["Margin"],
                        "type": "bar",
                        # full per-bar colour list, not just the first entry
                        "marker": {"color": colors},
                    }
                ],
                "layout": {
                    "plot_bgcolor": "rgba(0,0,0,0)",
                    "paper_bgcolor": "rgba(0,0,0,0)",
                    "font": {"color": "white"},
                    "xaxis": {"automargin": True},
                    "yaxis": {"automargin": True},
                    "orientation": "h",
                    "height": 400,
                    "margin": {"t": 10, "l": 10, "r": 10},
                },
            },
        )
        for column in ["Margin"]
        if column in dff
    ]
168,147 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
df = []
dff = []
The provided code snippet includes necessary dependencies for implementing the `update_graphs1` function. Write a Python function `def update_graphs1(rows, derived_virtual_selected_rows)` to solve the following problem:
Update Graphs
Here is the function:
def update_graphs1(rows, derived_virtual_selected_rows):
    """Update the "From DF High" bar graph from the (filtered) DataTable rows.

    Args:
        rows: `derived_virtual_data` from the DataTable (list of row dicts),
            or None before the table has rendered.
        derived_virtual_selected_rows: indices of user-selected rows, or None.

    Returns:
        A list containing a single dcc.Graph, or an empty list when the
        "From DF High" column is absent from the data.
    """
    if derived_virtual_selected_rows is None:
        derived_virtual_selected_rows = []

    # Fall back to the module-level df until the table supplies its rows.
    dff = df if rows is None else pd.DataFrame(rows)
    dff["From DF High"] = dff["From DF High"] * 100  # fraction -> percent

    # One colour per bar: white for selected rows, otherwise that row's own
    # pre-computed Margincolor value.  (Previously the whole Margincolor
    # Series was stored in every slot and only colors[0] was passed to the
    # figure, so per-row selection highlighting never worked.)
    colors = [
        "white"
        if i in derived_virtual_selected_rows
        else dff["Margincolor"].iloc[i]
        for i in range(len(dff))
    ]

    return [
        dcc.Graph(
            id="From DF High",
            figure={
                "data": [
                    {
                        "x": dff["Trading Pair"],
                        "y": dff["From DF High"],
                        "type": "bar",
                        # full per-bar colour list, not just the first entry
                        "marker": {"color": colors},
                    }
                ],
                "layout": {
                    "plot_bgcolor": "rgba(0,0,0,0)",
                    "paper_bgcolor": "rgba(0,0,0,0)",
                    "font": {"color": "white"},
                    "xaxis": {"automargin": True},
                    "yaxis": {"automargin": True},
                    "orientation": "h",
                    "height": 400,
                    # width ~750 would be roughly half screen width
                    "margin": {"t": 10, "l": 10, "r": 10},
                },
            },
        )
        for column in ["From DF High"]
        if column in dff
    ]
168,148 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
df = []
dff = []
The provided code snippet includes necessary dependencies for implementing the `gauge1` function. Write a Python function `def gauge1(rows, derived_virtual_selected_rows)` to solve the following problem:
Active Margins Gauge
Here is the function:
def gauge1(rows, derived_virtual_selected_rows):
    """Active Margins Gauge.

    Sums the fractional "Margin" column of the supplied table rows (falling
    back to the module-level ``df`` when ``rows`` is None) and scales the
    total to a percentage.
    """
    if derived_virtual_selected_rows is None:
        derived_virtual_selected_rows = []
    frame = pd.DataFrame(rows) if rows is not None else df
    return frame["Margin"].sum() * 100
168,149 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
tg_wrapper = controls.tg_wrapper
The provided code snippet includes necessary dependencies for implementing the `gauge2` function. Write a Python function `def gauge2(rows, derived_virtual_selected_rows)` to solve the following problem:
7 Day Total Margins Gauge
Here is the function:
def gauge2(rows, derived_virtual_selected_rows):
    """7 Day Total Margins Gauge.

    Reads the telegram helper's trade store and returns the sum of the
    margin percentages of every trade dated within the last 7 days.
    """
    cutoff = (datetime.now() - timedelta(days=7)).isoformat()
    trade_count = 0  # tallied for parity with the average calculation below
    total_margin = 0

    tg_wrapper.helper.read_data()
    trades = tg_wrapper.helper.data["trades"]
    for trade_datetime in trades:
        traded_at = datetime.strptime(trade_datetime, "%Y-%m-%d %H:%M:%S")
        if traded_at.isoformat() > cutoff:
            trade_count += 1
            # margin is stored as text like "1.23%"; strip from the % sign on
            margin_text = trades[trade_datetime]["margin"]
            total_margin += float(margin_text[: margin_text.find("%")])
    # avg margin would be total_margin / trade_count
    return total_margin
168,150 | import json
import os
from datetime import datetime, timedelta
import pandas as pd
import dash_bootstrap_components as dbc
import dash_daq as daq
from dash import (
Dash,
html,
dcc,
callback,
clientside_callback,
Input,
Output,
dash_table,
)
from pages import controls, config, terminals, telegramconfig
The provided code snippet includes necessary dependencies for implementing the `page_width_column_adjustment` function. Write a Python function `def page_width_column_adjustment(screen_res)` to solve the following problem:
hides some columns based on screen width
Here is the function:
def page_width_column_adjustment(screen_res):
    """Hide some table columns based on screen width.

    Args:
        screen_res: dict with a "width" key (pixels), as reported by the
            clientside screen-resolution callback.

    Returns:
        List of column names to hide: the most columns on small screens,
        fewer on medium ones, and none at or above the large breakpoint.

    Note: the stray debug ``print(screen_res)`` calls were removed.
    """
    small = 0
    medium = 500
    large = 875
    width = screen_res["width"]

    if small <= width <= medium:
        # Narrow screens: keep only the essential columns visible.
        return [
            "Uptime",
            "Exchange",
            "Price",
            "TSLT",
            "PVLT",
            "ERI",
            "EMA",
            "MACD",
            "OBV",
            "Action",
            "DF High",
            "Delta",
        ]
    if medium <= width < large:
        return ["Exchange", "TSLT", "PVLT", "DF High", "Delta"]
    return []
168,151 | import time
import json
import pandas as pd
import re
import sys
from datetime import datetime
from decimal import Decimal
from itertools import islice
from tradingview_ta import *
from importlib.metadata import version
from controllers.PyCryptoBot import PyCryptoBot
from models.helper.TelegramBotHelper import TelegramBotHelper as TGBot
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase import AuthAPI as CBAuthAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
from models.exchange.kucoin import PublicAPI as KPublicAPI
from models.exchange.Granularity import Granularity
from models.exchange.ExchangesEnum import Exchange as CryptoExchange
class PyCryptoBot(BotConfig):
def __init__(self, config_file: str = None, exchange: Exchange = None):
    """Create a bot instance: load configuration, then set up consoles,
    the scheduler, and per-run trading state.

    Args:
        config_file: path to the JSON config file; falls back to
            "config.json" when None/empty.
        exchange: exchange selector passed through to the BotConfig base.
    """
    self.config_file = config_file or "config.json"
    super(PyCryptoBot, self).__init__(filename=self.config_file, exchange=exchange)
    self.console_term = Console(no_color=(not self.term_color), width=self.term_width) # logs to the screen
    self.console_log = Console(file=open(self.logfile, "w"), no_color=True, width=self.log_width) # logs to file
    self.table_console = Table(title=None, box=None, show_header=False, show_footer=False)
    # Scheduler driving the periodic execute_job() runs.
    self.s = sched.scheduler(time.time, time.sleep)
    self.price = 0
    # NOTE(review): -1.0 looks like a "taker fee not yet fetched" sentinel
    # (maker fee starts at 0.0) — confirm against the fee-lookup code.
    self.takerfee = -1.0
    self.makerfee = 0.0
    self.account = None
    self.state = None
    self.technical_analysis = None
    self.websocket_connection = None
    self.ticker_self = None
    # Last-interval candle data and the full candle history, respectively.
    self.df_last = pd.DataFrame()
    self.trading_data = pd.DataFrame()
    self.telegram_bot = TelegramBotHelper(self)
    # Per-trade history accumulated during a run/simulation.
    self.trade_tracker = pd.DataFrame(
        columns=[
            "Datetime",
            "Market",
            "Action",
            "Price",
            "Base",
            "Quote",
            "Margin",
            "Profit",
            "Fee",
            "DF_High",
            "DF_Low",
        ]
    )
    # NOTE(review): trading_myPta and pandas_ta_enabled are not defined in
    # this scope — presumably module-level globals or attributes that were
    # meant to be read via self (set by BotConfig). Confirm; as written this
    # may raise NameError at construction time.
    if trading_myPta is True and pandas_ta_enabled is True:
        self.enable_pandas_ta = True
    else:
        self.enable_pandas_ta = False
def execute_job(self):
"""Trading bot job which runs at a scheduled interval"""
if self.is_live:
self.state.account.mode = "live"
else:
self.state.account.mode = "test"
# This is used to control some API calls when using websockets
last_api_call_datetime = datetime.now() - self.state.last_api_call_datetime
if last_api_call_datetime.seconds > 60:
self.state.last_api_call_datetime = datetime.now()
# This is used by the telegram bot
# If it not enabled in config while will always be False
if not self.is_sim and not self.disabletelegram:
control_status = self.telegram_bot.check_bot_control_status()
while control_status == "pause" or control_status == "paused":
if control_status == "pause":
RichText.notify("Pausing bot", self, "normal")
self.notify_telegram(f"{self.market} bot is paused")
self.telegram_bot.update_bot_status("paused")
if self.websocket:
RichText.notify("Closing websocket...", self, "normal")
self.websocket_connection.close()
time.sleep(30)
control_status = self.telegram_bot.check_bot_control_status()
if control_status == "start":
RichText.notify("Restarting bot", self, "normal")
self.notify_telegram(f"{self.market} bot has restarted")
self.telegram_bot.update_bot_status("active")
self.read_config(self.exchange)
if self.websocket:
RichText.notify("Starting websocket...", self, "normal")
self.websocket_connection.start()
if control_status == "exit":
RichText.notify("Closing Bot {self.market}", self, "normal")
self.notify_telegram(f"{self.market} bot is stopping")
self.telegram_bot.remove_active_bot()
sys.exit(0)
if control_status == "reload":
RichText.notify(f"Reloading config parameters {self.market}", self, "normal")
self.read_config(self.exchange)
if self.websocket:
self.websocket_connection.close()
if self.exchange == Exchange.BINANCE:
self.websocket_connection = BWebSocketClient([self.market], self.granularity, app=self)
elif self.exchange == Exchange.COINBASE:
self.websocket_connection = CBWebSocketClient([self.market], self.granularity, app=self)
elif self.exchange == Exchange.COINBASEPRO:
self.websocket_connection = CWebSocketClient([self.market], self.granularity, app=self)
elif self.exchange == Exchange.KUCOIN:
self.websocket_connection = KWebSocketClient([self.market], self.granularity, app=self)
self.websocket_connection.start()
list(map(self.s.cancel, self.s.queue))
self.s.enter(
5,
1,
self.execute_job,
(),
)
# self.read_config(self.exchange)
self.telegram_bot.update_bot_status("active")
else:
# runs once at the start of a simulation
if self.app_started:
if self.simstartdate is not None:
try:
self.state.iterations = self.trading_data.index.get_loc(str(self.get_date_from_iso8601_str(self.simstartdate)))
except KeyError:
RichText.notify("Simulation data is invalid, unable to locate interval using date key.", self, "error")
sys.exit(0)
self.app_started = False
# reset self.websocket_connection every 23 hours if applicable
if self.websocket and not self.is_sim:
if self.websocket_connection.time_elapsed > 82800:
RichText.notify("Websocket requires a restart every 23 hours!", self, "normal")
RichText.notify("Stopping websocket...", self, "normal")
self.websocket_connection.close()
RichText.notify("Starting websocket...", self, "normal")
self.websocket_connection.start()
RichText.notify("Restarting job in 30 seconds...", self, "normal")
self.s.enter(
30,
1,
self.execute_job,
(),
)
# increment self.state.iterations
self.state.iterations = self.state.iterations + 1
if not self.is_sim:
# check if data exists or not and only refresh at candle close.
if len(self.trading_data) == 0 or (
len(self.trading_data) > 0
and (
datetime.timestamp(datetime.utcnow()) - self.granularity.to_integer
>= datetime.timestamp(
self.trading_data.iloc[
self.state.closed_candle_row,
self.trading_data.columns.get_loc("date"),
]
)
)
):
self.trading_data = self.get_historical_data(self.market, self.granularity, self.websocket_connection)
self.state.closed_candle_row = -1
self.price = float(self.trading_data.iloc[-1, self.trading_data.columns.get_loc("close")])
else:
# set time and price with ticker data and add/update current candle
ticker = self.get_ticker(self.market, self.websocket_connection)
# if 0, use last close value as self.price
self.price = self.trading_data["close"].iloc[-1] if ticker[1] == 0 else ticker[1]
self.ticker_date = ticker[0]
self.ticker_price = ticker[1]
if self.state.closed_candle_row == -2:
self.trading_data.iloc[-1, self.trading_data.columns.get_loc("low")] = (
self.price if self.price < self.trading_data["low"].iloc[-1] else self.trading_data["low"].iloc[-1]
)
self.trading_data.iloc[-1, self.trading_data.columns.get_loc("high")] = (
self.price if self.price > self.trading_data["high"].iloc[-1] else self.trading_data["high"].iloc[-1]
)
self.trading_data.iloc[-1, self.trading_data.columns.get_loc("close")] = self.price
self.trading_data.iloc[-1, self.trading_data.columns.get_loc("date")] = datetime.strptime(ticker[0], "%Y-%m-%d %H:%M:%S")
tsidx = pd.DatetimeIndex(self.trading_data["date"])
self.trading_data.set_index(tsidx, inplace=True)
self.trading_data.index.name = "ts"
else:
# not sure what this code is doing as it has a bug.
# i've added a websocket check and added a try..catch block
if self.websocket:
try:
self.trading_data.loc[len(self.trading_data.index)] = [
datetime.strptime(ticker[0], "%Y-%m-%d %H:%M:%S"),
self.trading_data["market"].iloc[-1],
self.trading_data["granularity"].iloc[-1],
(self.price if self.price < self.trading_data["close"].iloc[-1] else self.trading_data["close"].iloc[-1]),
(self.price if self.price > self.trading_data["close"].iloc[-1] else self.trading_data["close"].iloc[-1]),
self.trading_data["close"].iloc[-1],
self.price,
self.trading_data["volume"].iloc[-1],
]
tsidx = pd.DatetimeIndex(self.trading_data["date"])
self.trading_data.set_index(tsidx, inplace=True)
self.trading_data.index.name = "ts"
self.state.closed_candle_row = -2
except Exception:
pass
else:
self.df_last = self.get_interval(self.trading_data, self.state.iterations)
if len(self.df_last) > 0 and "close" in self.df_last:
self.price = self.df_last["close"][0]
if len(self.trading_data) == 0:
return None
# analyse the market data
if self.is_sim and len(self.trading_data.columns) > 8:
df = self.trading_data
# if smartswitch then get the market data using new granularity
if self.sim_smartswitch:
self.df_last = self.get_interval(df, self.state.iterations)
if len(self.df_last.index.format()) > 0:
if self.simstartdate is not None:
start_date = self.get_date_from_iso8601_str(self.simstartdate)
else:
start_date = self.get_date_from_iso8601_str(str(df.head(1).index.format()[0]))
if self.simenddate is not None:
if self.simenddate == "now":
end_date = self.get_date_from_iso8601_str(str(datetime.now()))
else:
end_date = self.get_date_from_iso8601_str(self.simenddate)
else:
end_date = self.get_date_from_iso8601_str(str(df.tail(1).index.format()[0]))
simDate = self.get_date_from_iso8601_str(str(self.state.last_df_index))
trading_data = self.get_smart_switch_historical_data_chained(
self.market,
self.granularity,
str(start_date),
str(end_date),
)
if self.granularity == Granularity.ONE_HOUR:
simDate = self.get_date_from_iso8601_str(str(simDate))
sim_rounded = pd.Series(simDate).dt.round("60min")
simDate = sim_rounded[0]
elif self.granularity == Granularity.FIFTEEN_MINUTES:
simDate = self.get_date_from_iso8601_str(str(simDate))
sim_rounded = pd.Series(simDate).dt.round("15min")
simDate = sim_rounded[0]
elif self.granularity == Granularity.FIVE_MINUTES:
simDate = self.get_date_from_iso8601_str(str(simDate))
sim_rounded = pd.Series(simDate).dt.round("5min")
simDate = sim_rounded[0]
dateFound = False
while dateFound is False:
try:
self.state.iterations = trading_data.index.get_loc(str(simDate)) + 1
dateFound = True
except Exception:
simDate += timedelta(seconds=self.granularity.value[0])
if self.get_date_from_iso8601_str(str(simDate)).isoformat() == self.get_date_from_iso8601_str(str(self.state.last_df_index)).isoformat():
self.state.iterations += 1
if self.state.iterations == 0:
self.state.iterations = 1
trading_dataCopy = trading_data.copy()
_technical_analysis = TechnicalAnalysis(trading_dataCopy, self.adjusttotalperiods, app=self)
# if 'bool(self.df_last["morning_star"].values[0])' not in df:
_technical_analysis.add_all()
df = _technical_analysis.get_df()
self.sim_smartswitch = False
elif self.smart_switch == 1 and _technical_analysis is None:
trading_dataCopy = trading_data.copy()
_technical_analysis = TechnicalAnalysis(trading_dataCopy, self.adjusttotalperiods, app=self)
if "morning_star" not in df:
_technical_analysis.add_all()
df = _technical_analysis.get_df()
else:
_technical_analysis = TechnicalAnalysis(self.trading_data, len(self.trading_data), app=self)
_technical_analysis.add_all()
df = _technical_analysis.get_df()
if self.is_sim:
self.df_last = self.get_interval(df, self.state.iterations)
else:
self.df_last = self.get_interval(df)
# Don't want index of new, unclosed candle, use the historical row setting to set index to last closed candle
if self.state.closed_candle_row != -2 and len(self.df_last.index.format()) > 0:
current_df_index = str(self.df_last.index.format()[0])
else:
current_df_index = self.state.last_df_index
formatted_current_df_index = f"{current_df_index} 00:00:00" if len(current_df_index) == 10 else current_df_index
current_sim_date = formatted_current_df_index
if self.state.iterations == 2:
# check if bot has open or closed order
# update data.json "opentrades"
if not self.disabletelegram:
if self.state.last_action == "BUY":
self.telegram_bot.add_open_order()
else:
self.telegram_bot.remove_open_order()
if (
(last_api_call_datetime.seconds > 60 or self.is_sim)
and self.smart_switch == 1
and self.sell_smart_switch == 1
and self.granularity != Granularity.FIVE_MINUTES
and self.state.last_action == "BUY"
):
if not self.is_sim or (self.is_sim and not self.simresultonly):
RichText.notify(
"Open order detected smart switching to 300 (5 min) granularity.",
self,
"normal",
)
if not self.telegramtradesonly:
self.notify_telegram(self.market + " open order detected smart switching to 300 (5 min) granularity")
if self.is_sim:
self.sim_smartswitch = True
self.granularity = Granularity.FIVE_MINUTES
list(map(self.s.cancel, self.s.queue))
self.s.enter(5, 1, self.execute_job, ())
if (
(last_api_call_datetime.seconds > 60 or self.is_sim)
and self.smart_switch == 1
and self.sell_smart_switch == 1
and self.granularity == Granularity.FIVE_MINUTES
and self.state.last_action == "SELL"
):
if not self.is_sim or (self.is_sim and not self.simresultonly):
RichText.notify(
"Sell detected smart switching to 3600 (1 hour) granularity.",
self,
"normal",
)
if not self.telegramtradesonly:
self.notify_telegram(self.market + " sell detected smart switching to 3600 (1 hour) granularity")
if self.is_sim:
self.sim_smartswitch = True
self.granularity = Granularity.ONE_HOUR
list(map(self.s.cancel, self.s.queue))
self.s.enter(5, 1, self.execute_job, ())
# use actual sim mode date to check smartchswitch
if (
(last_api_call_datetime.seconds > 60 or self.is_sim)
and self.smart_switch == 1
and self.granularity == Granularity.ONE_HOUR
and self.is_1h_ema1226_bull(current_sim_date) is True
and self.is_6h_ema1226_bull(current_sim_date) is True
):
if not self.is_sim or (self.is_sim and not self.simresultonly):
RichText.notify(
"Smart switch from granularity 3600 (1 hour) to 900 (15 min).",
self,
"normal",
)
if self.is_sim:
self.sim_smartswitch = True
if not self.telegramtradesonly:
self.notify_telegram(self.market + " smart switch from granularity 3600 (1 hour) to 900 (15 min)")
self.granularity = Granularity.FIFTEEN_MINUTES
list(map(self.s.cancel, self.s.queue))
self.s.enter(5, 1, self.execute_job, ())
# use actual sim mode date to check smartchswitch
if (
(last_api_call_datetime.seconds > 60 or self.is_sim)
and self.smart_switch == 1
and self.granularity == Granularity.FIFTEEN_MINUTES
and self.is_1h_ema1226_bull(current_sim_date) is False
and self.is_6h_ema1226_bull(current_sim_date) is False
):
if not self.is_sim or (self.is_sim and not self.simresultonly):
RichText.notify(
"Smart switch from granularity 900 (15 min) to 3600 (1 hour).",
self,
"normal",
)
if self.is_sim:
self.sim_smartswitch = True
if not self.telegramtradesonly:
self.notify_telegram(f"{self.market} smart switch from granularity 900 (15 min) to 3600 (1 hour)")
self.granularity = Granularity.ONE_HOUR
list(map(self.s.cancel, self.s.queue))
self.s.enter(5, 1, self.execute_job, ())
if self.exchange == Exchange.BINANCE and self.granularity == Granularity.ONE_DAY:
if len(df) < 250:
# data frame should have 250 rows, if not retry
RichText.notify(f"Data frame length is < 250 ({str(len(df))})", self, "error")
list(map(self.s.cancel, self.s.queue))
self.s.enter(300, 1, self.execute_job, ())
else:
# verify 300 rows - subtract 34% to allow small buffer if API is acting up.
adjusted_periods = self.adjusttotalperiods - (self.adjusttotalperiods * 0.30)
if len(df) < adjusted_periods: # If 300 is required, set adjusttotalperiods in config to 300 * 30%.
if not self.is_sim:
# data frame should have 300 rows or equal to adjusted total rows if set, if not retry
RichText.notify(
f"error: data frame length is < {str(int(adjusted_periods))} ({str(len(df))})",
self,
"error",
)
# pause for 10 seconds to prevent multiple calls immediately
time.sleep(10)
list(map(self.s.cancel, self.s.queue))
self.s.enter(
300,
1,
self.execute_job,
(),
)
if len(self.df_last) > 0:
# last_action polling if live
if self.is_live:
last_action_current = self.state.last_action
# If using websockets make this call every minute instead of each iteration
if self.websocket and not self.is_sim:
if last_api_call_datetime.seconds > 60:
self.state.poll_last_action()
else:
self.state.poll_last_action()
if last_action_current != self.state.last_action:
RichText.notify(
f"Last action change detected from {last_action_current} to {self.state.last_action}.",
self,
"normal",
)
if not self.telegramtradesonly:
self.notify_telegram(f"{self.market} last_action change detected from {last_action_current} to {self.state.last_action}")
# this is used to reset variables if error occurred during trade process
# make sure signals and telegram info is set correctly, close bot if needed on sell
if self.state.action == "check_action" and self.state.last_action == "BUY":
self.state.trade_error_cnt = 0
self.state.trailing_buy = False
self.state.action = None
self.state.trailing_buy_immediate = False
if not self.disabletelegram:
self.telegram_bot.add_open_order()
if not self.ignorepreviousbuy:
RichText.notify(f"{self.market} ({self.print_granularity()}) - {datetime.today().strftime('%Y-%m-%d %H:%M:%S')}", self, "warning")
RichText.notify("Catching BUY that occurred previously. Updating signal information.", self, "warning")
if not self.telegramtradesonly and not self.disabletelegram:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") - "
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ "\n"
+ "Catching BUY that occurred previously. Updating signal information."
)
elif self.state.action == "check_action" and self.state.last_action == "SELL":
self.state.prevent_loss = False
self.state.trailing_sell = False
self.state.trailing_sell_immediate = False
self.state.tsl_triggered = False
self.state.tsl_pcnt = float(self.trailing_stop_loss)
self.state.tsl_trigger = float(self.trailing_stop_loss_trigger)
self.state.tsl_max = False
self.state.trade_error_cnt = 0
self.state.action = None
self.telegram_bot.remove_open_order()
if not self.ignoreprevioussell:
RichText.notify(f"{self.market} ({self.print_granularity()}) - {datetime.today().strftime('%Y-%m-%d %H:%M:%S')}", self, "warning")
RichText.notify("Catching SELL that occurred previously. Updating signal information.", self, "warning")
if not self.telegramtradesonly:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") - "
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ "\n"
+ "Catching SELL that occurred previously. Updating signal information."
)
self.telegram_bot.close_trade(
str(self.get_date_from_iso8601_str(str(datetime.now()))),
0,
0,
)
if self.exitaftersell:
RichText.notify("Exit after sell! (\"exitaftersell\" is enabled)", self, "warning")
sys.exit(0)
if self.price < 0.000001:
raise Exception(f"{self.market} is unsuitable for trading, quote self.price is less than 0.000001!")
try:
# technical indicators
ema12gtema26 = bool(self.df_last["ema12gtema26"].values[0])
ema12gtema26co = bool(self.df_last["ema12gtema26co"].values[0])
goldencross = bool(self.df_last["goldencross"].values[0])
macdgtsignal = bool(self.df_last["macdgtsignal"].values[0])
macdgtsignalco = bool(self.df_last["macdgtsignalco"].values[0])
ema12ltema26co = bool(self.df_last["ema12ltema26co"].values[0])
macdltsignalco = bool(self.df_last["macdltsignalco"].values[0])
obv_pc = float(self.df_last["obv_pc"].values[0])
elder_ray_buy = bool(self.df_last["eri_buy"].values[0])
elder_ray_sell = bool(self.df_last["eri_sell"].values[0])
closegtbb20_upperco = bool(self.df_last["closegtbb20_upperco"].values[0])
closeltbb20_lowerco = bool(self.df_last["closeltbb20_lowerco"].values[0])
# if simulation, set goldencross based on actual sim date
if self.is_sim:
if self.adjusttotalperiods < 200:
goldencross = False
else:
goldencross = self.is_1h_sma50200_bull(current_sim_date)
except KeyError as err:
RichText.notify(err, self, "error")
sys.exit()
# Log data for Telegram Bot
self.telegram_bot.add_indicators("EMA", ema12gtema26 or ema12gtema26co)
if not self.disablebuyelderray:
self.telegram_bot.add_indicators("ERI", elder_ray_buy)
if self.disablebullonly:
self.telegram_bot.add_indicators("BULL", goldencross)
if not self.disablebuymacd:
self.telegram_bot.add_indicators("MACD", macdgtsignal or macdgtsignalco)
if not self.disablebuyobv:
self.telegram_bot.add_indicators("OBV", float(obv_pc) > 0)
if self.is_sim:
# Reset the Strategy so that the last record is the current sim date
# To allow for calculations to be done on the sim date being processed
sdf = df[df["date"] <= current_sim_date].tail(self.adjusttotalperiods)
strategy = Strategy(self, self.state, sdf, sdf.index.get_loc(str(current_sim_date)) + 1)
else:
strategy = Strategy(self, self.state, df)
trailing_action_logtext = ""
# determine current action, indicatorvalues will be empty if custom Strategy are disabled or it's debug is False
self.state.action, indicatorvalues = strategy.get_action(self.state, self.price, current_sim_date, self.websocket_connection)
immediate_action = False
margin, profit, sell_fee, change_pcnt_high = 0, 0, 0, 0
# Reset the TA so that the last record is the current sim date
# To allow for calculations to be done on the sim date being processed
if self.is_sim:
trading_dataCopy = self.trading_data[self.trading_data["date"] <= current_sim_date].tail(self.adjusttotalperiods).copy()
_technical_analysis = TechnicalAnalysis(trading_dataCopy, self.adjusttotalperiods, app=self)
if self.state.last_buy_size > 0 and self.state.last_buy_price > 0 and self.price > 0 and self.state.last_action == "BUY":
# update last buy high
if self.price > self.state.last_buy_high:
self.state.last_buy_high = self.price
if self.state.last_buy_high > 0:
change_pcnt_high = ((self.price / self.state.last_buy_high) - 1) * 100
else:
change_pcnt_high = 0
# buy and sell calculations
self.state.last_buy_fee = round(self.state.last_buy_size * self.get_taker_fee(), 8)
self.state.last_buy_filled = round(
((self.state.last_buy_size - self.state.last_buy_fee) / self.state.last_buy_price),
8,
)
# if not a simulation, sync with exchange orders
if not self.is_sim:
if self.websocket:
if last_api_call_datetime.seconds > 60:
self.state.exchange_last_buy = self.get_last_buy()
else:
self.state.exchange_last_buy = self.get_last_buy()
exchange_last_buy = self.state.exchange_last_buy
if exchange_last_buy is not None:
if self.state.last_buy_size != exchange_last_buy["size"]:
self.state.last_buy_size = exchange_last_buy["size"]
if self.state.last_buy_filled != exchange_last_buy["filled"]:
self.state.last_buy_filled = exchange_last_buy["filled"]
if self.state.last_buy_price != exchange_last_buy["price"]:
self.state.last_buy_price = exchange_last_buy["price"]
if self.exchange == Exchange.COINBASE or self.exchange == Exchange.COINBASEPRO or self.exchange == Exchange.KUCOIN:
if self.state.last_buy_fee != exchange_last_buy["fee"]:
self.state.last_buy_fee = exchange_last_buy["fee"]
margin, profit, sell_fee = calculate_margin(
buy_size=self.state.last_buy_size,
buy_filled=self.state.last_buy_filled,
buy_price=self.state.last_buy_price,
buy_fee=self.state.last_buy_fee,
sell_percent=self.get_sell_percent(),
sell_price=self.price,
sell_taker_fee=self.get_taker_fee(),
app=self,
)
# handle immediate sell actions
if self.manual_trades_only is False and strategy.is_sell_trigger(
self.state, self.price, _technical_analysis.get_trade_exit(self.price), margin, change_pcnt_high
):
self.state.action = "SELL"
immediate_action = True
# handle overriding wait actions
# (e.g. do not sell if sell at loss disabled!, do not buy in bull if bull only, manual trades only)
if self.manual_trades_only is True or (self.state.action != "WAIT" and strategy.is_wait_trigger(margin, goldencross)):
self.state.action = "WAIT"
immediate_action = False
# If buy signal, save the self.price and check for decrease/increase before buying.
if self.state.action == "BUY" and immediate_action is not True:
(
self.state.action,
self.state.trailing_buy,
trailing_action_logtext,
immediate_action,
) = strategy.check_trailing_buy(self.state, self.price)
# If sell signal, save the self.price and check for decrease/increase before selling.
if self.state.action == "SELL" and immediate_action is not True:
(
self.state.action,
self.state.trailing_sell,
trailing_action_logtext,
immediate_action,
) = strategy.check_trailing_sell(self.state, self.price)
if self.enableimmediatebuy:
if self.state.action == "BUY":
immediate_action = True
if not self.is_sim and self.telegrambotcontrol:
manual_buy_sell = self.telegram_bot.check_manual_buy_sell()
if not manual_buy_sell == "WAIT":
self.state.action = manual_buy_sell
immediate_action = True
# polling is every 5 minutes (even for hourly intervals), but only process once per interval
if immediate_action is True or self.state.last_df_index != current_df_index:
precision = 4
if self.price < 0.01:
precision = 8
# Since precision does not change after this point, it is safe to prepare a tailored `truncate()` that would
# work with this precision. It should save a couple of `precision` uses, one for each `truncate()` call.
truncate = functools.partial(_truncate, n=precision)
def _candlestick(candlestick_status: str = "") -> None:
if candlestick_status == "":
return
self.table_console = Table(title=None, box=None, show_header=False, show_footer=False)
self.table_console.add_row(
RichText.styled_text("Bot1", "magenta"),
RichText.styled_text(formatted_current_df_index, "white"),
RichText.styled_text(self.market, "yellow"),
RichText.styled_text(self.print_granularity(), "yellow"),
RichText.styled_text(candlestick_status, "violet"),
)
self.console_term.print(self.table_console)
if self.disablelog is False:
self.console_log.print(self.table_console)
self.table_console = Table(title=None, box=None, show_header=False, show_footer=False) # clear table
def _notify(notification: str = "", level: str = "normal") -> None:
if notification == "":
return
if level == "warning":
color = "dark_orange"
elif level == "error":
color = "red1"
elif level == "critical":
color = "red1 blink"
elif level == "info":
color = "yellow blink"
else:
color = "violet"
self.table_console = Table(title=None, box=None, show_header=False, show_footer=False)
self.table_console.add_row(
RichText.styled_text("Bot1", "magenta"),
RichText.styled_text(formatted_current_df_index, "white"),
RichText.styled_text(self.market, "yellow"),
RichText.styled_text(self.print_granularity(), "yellow"),
RichText.styled_text(notification, color),
)
self.console_term.print(self.table_console)
if self.disablelog is False:
self.console_log.print(self.table_console)
self.table_console = Table(title=None, box=None, show_header=False, show_footer=False) # clear table
if not self.is_sim:
df_high = df[df["date"] <= current_sim_date]["close"].max()
df_low = df[df["date"] <= current_sim_date]["close"].min()
range_start = str(df.iloc[0, 0])
range_end = str(df.iloc[len(df) - 1, 0])
else:
df_high = df["close"].max()
df_low = df["close"].min()
if len(df) > self.adjusttotalperiods:
range_start = str(df.iloc[self.state.iterations - self.adjusttotalperiods, 0]) # noqa: F841
else:
# RichText.notify(f"Trading dataframe length {len(df)} is greater than expected {self.adjusttotalperiods}", self, "warning")
range_start = str(df.iloc[self.state.iterations - len(df), 0]) # noqa: F841
range_end = str(df.iloc[self.state.iterations - 1, 0]) # noqa: F841
df_swing = round(((df_high - df_low) / df_low) * 100, 2)
df_near_high = round(((self.price - df_high) / df_high) * 100, 2)
if self.state.last_action == "BUY":
if self.state.last_buy_size > 0:
margin_text = truncate(margin) + "%"
else:
margin_text = "0%"
if self.is_sim:
# save margin for summary if open trade
self.state.open_trade_margin_float = margin
self.state.open_trade_margin = margin_text
else:
margin_text = ""
args = [
arg
for arg in [
RichText.styled_text("Bot1", "magenta"),
RichText.styled_text(formatted_current_df_index, "white"),
RichText.styled_text(self.market, "yellow"),
RichText.styled_text(self.print_granularity(), "yellow"),
RichText.styled_text(str(self.price), "white"),
RichText.bull_bear(goldencross),
RichText.number_comparison(
"EMA12/26:",
round(self.df_last["ema12"].values[0], 2),
round(self.df_last["ema26"].values[0], 2),
ema12gtema26co or ema12ltema26co,
self.disablebuyema,
),
RichText.number_comparison(
"MACD:",
round(self.df_last["macd"].values[0], 2),
round(self.df_last["signal"].values[0], 2),
macdgtsignalco or macdltsignalco,
self.disablebuymacd,
),
RichText.styled_text(trailing_action_logtext),
RichText.on_balance_volume(
self.df_last["obv"].values[0],
self.df_last["obv_pc"].values[0],
self.disablebuyobv,
),
RichText.elder_ray(elder_ray_buy, elder_ray_sell, self.disablebuyelderray),
RichText.number_comparison(
"BBU:",
round(self.df_last["close"].values[0], 2),
round(self.df_last["bb20_upper"].values[0], 2),
closegtbb20_upperco or closeltbb20_lowerco,
self.disablebuybbands_s1,
),
RichText.number_comparison(
"BBL:",
round(self.df_last["bb20_lower"].values[0], 2),
round(self.df_last["close"].values[0], 2),
closegtbb20_upperco or closeltbb20_lowerco,
self.disablebuybbands_s1,
),
RichText.action_text(self.state.action),
RichText.last_action_text(self.state.last_action),
RichText.styled_label_text(
"DF-H/L",
"white",
f"{str(df_high)} / {str(df_low)} ({df_swing}%)",
"cyan",
),
RichText.styled_label_text("Near-High", "white", f"{df_near_high}%", "cyan"), # price near high
RichText.styled_label_text("Range", "white", f"{range_start} <-> {range_end}", "cyan") if (self.term_width > 120) else None,
RichText.margin_text(margin_text, self.state.last_action),
RichText.delta_text(
self.price,
self.state.last_buy_price,
precision,
self.state.last_action,
),
]
if arg
]
if not self.is_sim or (self.is_sim and not self.simresultonly):
self.table_console.add_row(*args)
self.console_term.print(self.table_console)
if self.disablelog is False:
self.console_log.print(self.table_console)
self.table_console = Table(title=None, box=None, show_header=False, show_footer=False) # clear table
if self.state.last_action == "BUY":
# display support, resistance and fibonacci levels
if not self.is_sim:
_notify(_technical_analysis.print_support_resistance_fibonacci_levels(self.price))
# if a buy signal
if self.state.action == "BUY":
self.state.last_buy_price = self.price
self.state.last_buy_high = self.state.last_buy_price
# if live
if self.is_live:
self.insufficientfunds = False
try:
self.account.quote_balance_before = self.account.get_balance(self.quote_currency)
self.state.last_buy_size = float(self.account.quote_balance_before)
if self.buymaxsize and self.buylastsellsize and self.state.minimum_order_quote(quote=self.state.last_sell_size, balancechk=True):
self.state.last_buy_size = self.state.last_sell_size
elif self.buymaxsize and self.state.last_buy_size > self.buymaxsize:
self.state.last_buy_size = self.buymaxsize
if self.account.quote_balance_before < self.state.last_buy_size:
self.insufficientfunds = True
except Exception:
pass
if not self.insufficientfunds and self.buyminsize < self.account.quote_balance_before:
if not self.is_live:
if not self.is_sim or (self.is_sim and not self.simresultonly):
_notify(f"*** Executing SIMULATION Buy Order at {str(self.price)} ***", "info")
else:
_notify("*** Executing LIVE Buy Order ***", "info")
# display balances
_notify(f"{self.base_currency} balance before order: {str(self.account.base_balance_before)}", "debug")
_notify(f"{self.quote_currency} balance before order: {str(self.account.quote_balance_before)}", "debug")
# place the buy order
resp_error = 0
try:
self.market_buy(
self.market,
self.state.last_buy_size,
self.get_buy_percent(),
)
except Exception as err:
_notify(f"Trade Error: {err}", "error")
resp_error = 1
if resp_error == 0:
self.account.base_balance_after = 0
self.account.quote_balance_after = 0
try:
ac = self.account.get_balance()
df_base = ac[ac["currency"] == self.base_currency]["available"]
self.account.base_balance_after = 0.0 if len(df_base) == 0 else float(df_base.values[0])
df_quote = ac[ac["currency"] == self.quote_currency]["available"]
self.account.quote_balance_after = 0.0 if len(df_quote) == 0 else float(df_quote.values[0])
bal_error = 0
except Exception as err:
bal_error = 1
_notify(
f"Error: Balance not retrieved after trade for {self.market}",
"warning",
)
_notify(f"API Error Msg: {err}", "warning")
if bal_error == 0:
self.state.trade_error_cnt = 0
self.state.trailing_buy = False
self.state.last_action = "BUY"
self.state.action = "DONE"
self.state.trailing_buy_immediate = False
self.telegram_bot.add_open_order()
if not self.disabletelegram:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") - "
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ "\n"
+ "BUY at "
+ str(self.price)
)
else:
# set variable to trigger to check trade on next iteration
self.state.action = "check_action"
_notify(f"{self.market} - Error occurred while checking balance after BUY. Last transaction check will happen shortly.")
if not self.disabletelegramerrormsgs:
self.notify_telegram(
self.market + " - Error occurred while checking balance after BUY. Last transaction check will happen shortly."
)
else: # there was a response error
# only attempt BUY 3 times before exception to prevent continuous loop
self.state.trade_error_cnt += 1
if self.state.trade_error_cnt >= 2: # 3 attempts made
raise Exception("Trade Error: BUY transaction attempted 3 times. Check log for errors")
# set variable to trigger to check trade on next iteration
self.state.action = "check_action"
self.state.last_action = None
_notify(
f"API Error: Unable to place buy order for {self.market}.",
"warning",
)
if not self.disabletelegramerrormsgs:
self.notify_telegram(f"API Error: Unable to place buy order for {self.market}")
time.sleep(30)
else:
if not self.is_live:
if not self.is_sim or (self.is_sim and not self.simresultonly):
_notify(f"*** Skipping SIMULATION Buy Order at {str(self.price)} -- Insufficient Funds ***", "warning")
else:
_notify("*** Skipping LIVE Buy Order -- Insufficient Funds ***", "warning")
self.state.last_api_call_datetime -= timedelta(seconds=60)
# if not live
else:
if self.state.last_buy_size == 0 and self.state.last_buy_filled == 0:
# sim mode can now use buymaxsize as the amount used for a buy
if self.buymaxsize > 0:
self.state.last_buy_size = self.buymaxsize
self.state.first_buy_size = self.buymaxsize
else:
# TODO: calculate correct buy amount based on quote currency balance
self.state.last_buy_size = 1
self.state.first_buy_size = 1
# add option for buy last sell size
elif (
self.buymaxsize > 0
and self.buylastsellsize
and self.state.last_sell_size > self.state.minimum_order_quote(quote=self.state.last_sell_size, balancechk=True)
):
self.state.last_buy_size = self.state.last_sell_size
self.state.buy_count = self.state.buy_count + 1
self.state.buy_sum = self.state.buy_sum + self.state.last_buy_size
self.state.trailing_buy = False
self.state.action = "DONE"
self.state.trailing_buy_immediate = False
if not self.disabletelegram:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") - "
+ str(current_sim_date)
+ "\n - TEST BUY at "
+ str(self.price)
+ "\n - Buy Size: "
+ str(_truncate(self.state.last_buy_size, 4))
)
if not self.is_sim or (self.is_sim and not self.simresultonly):
_notify(f"*** Executing SIMULATION Buy Order at {str(self.price)} ***", "info")
bands = _technical_analysis.get_fibonacci_retracement_levels(float(self.price))
if not self.is_sim:
_notify(f"Fibonacci Retracement Levels: {str(bands)}")
_technical_analysis.print_support_resistance_levels_v2()
if len(bands) >= 1 and len(bands) <= 2:
if len(bands) == 1:
first_key = list(bands.keys())[0]
if first_key == "ratio1":
self.state.fib_low = 0
self.state.fib_high = bands[first_key]
if first_key == "ratio1_618":
self.state.fib_low = bands[first_key]
self.state.fib_high = bands[first_key] * 2
else:
self.state.fib_low = bands[first_key]
elif len(bands) == 2:
first_key = list(bands.keys())[0]
second_key = list(bands.keys())[1]
self.state.fib_low = bands[first_key]
self.state.fib_high = bands[second_key]
self.trade_tracker = pd.concat(
[
self.trade_tracker,
pd.DataFrame(
{
"Datetime": str(current_sim_date),
"Market": self.market,
"Action": "BUY",
"Price": self.price,
"Quote": self.state.last_buy_size,
"Base": float(self.state.last_buy_size) / float(self.price),
"DF_High": df[df["date"] <= current_sim_date]["close"].max(),
"DF_Low": df[df["date"] <= current_sim_date]["close"].min(),
},
index=[0],
),
],
)
self.state.in_open_trade = True
self.state.last_action = "BUY"
self.state.last_api_call_datetime -= timedelta(seconds=60)
if self.save_graphs:
if self.adjusttotalperiods < 200:
_notify("Trading Graphs can only be generated when dataframe has more than 200 periods.")
else:
tradinggraphs = TradingGraphs(_technical_analysis, self)
ts = datetime.now().timestamp()
filename = f"{self.market}_{self.print_granularity()}_buy_{str(ts)}.png"
# This allows graphs to be used in sim mode using the correct DF
if self.is_sim:
tradinggraphs.render_ema_and_macd(len(trading_dataCopy), "graphs/" + filename, True)
else:
tradinggraphs.render_ema_and_macd(len(self.trading_data), "graphs/" + filename, True)
# if a sell signal
elif self.state.action == "SELL":
# if live
if self.is_live:
if not self.is_live:
if not self.is_sim or (self.is_sim and not self.simresultonly):
_notify(f"*** Executing SIMULATION Sell Order at {str(self.price)} ***", "info")
else:
_notify("*** Executing LIVE Sell Order ***", "info")
# check balances before and display
self.account.base_balance_before = 0
self.account.quote_balance_before = 0
try:
self.account.base_balance_before = float(self.account.get_balance(self.base_currency))
self.account.quote_balance_before = float(self.account.get_balance(self.quote_currency))
except Exception:
pass
_notify(f"{self.base_currency} balance before order: {str(self.account.base_balance_before)}", "debug")
_notify(f"{self.quote_currency} balance before order: {str(self.account.quote_balance_before)}", "debug")
# execute a live market sell
baseamounttosell = float(self.account.base_balance_before) if self.sellfullbaseamount is True else float(self.state.last_buy_filled)
self.account.base_balance_after = 0
self.account.quote_balance_after = 0
# place the sell order
resp_error = 0
try:
self.market_sell(
self.market,
baseamounttosell,
self.get_sell_percent(),
)
except Exception as err:
_notify(f"Trade Error: {err}", "warning")
resp_error = 1
if resp_error == 0:
try:
self.account.base_balance_after = float(self.account.get_balance(self.base_currency))
self.account.quote_balance_after = float(self.account.get_balance(self.quote_currency))
bal_error = 0
except Exception as err:
bal_error = 1
_notify(
f"Error: Balance not retrieved after trade for {self.market}.",
"warning",
)
_notify(f"API Error Msg: {err}", "warning")
if bal_error == 0:
_notify(f"{self.base_currency} balance after order: {str(self.account.base_balance_after)}")
_notify(f"{self.quote_currency} balance after order: {str(self.account.quote_balance_after)}")
self.state.prevent_loss = False
self.state.trailing_sell = False
self.state.trailing_sell_immediate = False
self.state.tsl_triggered = False
self.state.tsl_pcnt = float(self.trailing_stop_loss)
self.state.tsl_trigger = float(self.trailing_stop_loss_trigger)
self.state.tsl_max = False
self.state.trade_error_cnt = 0
self.state.last_action = "SELL"
self.state.action = "DONE"
if not self.disabletelegram:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") - "
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ "\n"
+ "SELL at "
+ str(self.price)
+ " (margin: "
+ margin_text
+ ", delta: "
+ str(
round(
self.price - self.state.last_buy_price,
precision,
)
)
+ ")"
)
self.telegram_bot.close_trade(
str(self.get_date_from_iso8601_str(str(datetime.now()))),
str(self.price),
margin_text,
)
if self.exitaftersell and self.startmethod not in ("telegram"):
RichText.notify("Exit after sell! (\"exitaftersell\" is enabled)", self, "warning")
sys.exit(0)
else:
# set variable to trigger to check trade on next iteration
self.state.action = "check_action"
_notify(
f"{self.market} - Error occurred while checking balance after SELL. Last transaction check will happen shortly.",
"error",
)
if not self.disabletelegramerrormsgs:
self.notify_telegram(
self.market + " - Error occurred while checking balance after SELL. Last transaction check will happen shortly."
)
else: # there was an error
# only attempt SELL 3 times before exception to prevent continuous loop
self.state.trade_error_cnt += 1
if self.state.trade_error_cnt >= 2: # 3 attempts made
raise Exception("Trade Error: SELL transaction attempted 3 times. Check log for errors.")
# set variable to trigger to check trade on next iteration
self.state.action = "check_action"
self.state.last_action = None
_notify(
f"API Error: Unable to place SELL order for {self.market}.",
"warning",
)
if not self.disabletelegramerrormsgs:
self.notify_telegram(f"API Error: Unable to place SELL order for {self.market}")
time.sleep(30)
self.state.last_api_call_datetime -= timedelta(seconds=60)
# if not live
else:
# TODO - improve and confirm logic to simulate sell
margin, profit, sell_fee = calculate_margin(
buy_size=self.state.last_buy_size,
buy_filled=self.state.last_buy_filled,
buy_price=self.state.last_buy_price,
buy_fee=self.state.last_buy_fee,
sell_percent=self.get_sell_percent(),
sell_price=self.price,
sell_taker_fee=self.get_taker_fee(),
app=self,
)
if self.state.last_buy_size > 0:
margin_text = truncate(margin) + "%"
else:
margin_text = "0%"
# save last buy before this sell to use in Sim Summary
self.state.previous_buy_size = self.state.last_buy_size
# preserve next sell values for simulator
self.state.sell_count = self.state.sell_count + 1
sell_size = (self.get_sell_percent() / 100) * (
(self.price / self.state.last_buy_price) * (self.state.last_buy_size - self.state.last_buy_fee)
)
self.state.last_sell_size = sell_size - sell_fee
self.state.sell_sum = self.state.sell_sum + self.state.last_sell_size
# added to track profit and loss margins during sim runs
self.state.margintracker += float(margin)
self.state.profitlosstracker += float(profit)
self.state.feetracker += float(sell_fee)
self.state.buy_tracker += float(self.state.last_buy_size)
if not self.disabletelegram:
self.notify_telegram(
self.market
+ " ("
+ self.print_granularity()
+ ") "
+ str(current_sim_date)
+ "\n - TEST SELL at "
+ str(str(self.price))
+ " (margin: "
+ margin_text
+ ", delta: "
+ str(
round(
self.price - self.state.last_buy_price,
precision,
)
)
+ ")"
)
if self.price > 0:
margin_text = truncate(margin) + "%"
else:
margin_text = "0%"
if not self.is_sim or (self.is_sim and not self.simresultonly):
_notify(
f"*** Executing SIMULATION Sell Order at {str(self.price)} | Buy: {str(self.state.last_buy_price)} ({str(self.price - self.state.last_buy_price)}) | Profit: {str(profit)} on {_truncate(self.state.last_buy_size, precision)} | Fees: {str(round(sell_fee, precision))} | Margin: {margin_text} ***",
"info",
)
self.trade_tracker = pd.concat(
[
self.trade_tracker,
pd.DataFrame(
{
"Datetime": str(current_sim_date),
"Market": self.market,
"Action": "SELL",
"Price": self.price,
"Quote": self.state.last_sell_size,
"Base": self.state.last_buy_filled,
"Margin": margin,
"Profit": profit,
"Fee": sell_fee,
"DF_High": df[df["date"] <= current_sim_date]["close"].max(),
"DF_Low": df[df["date"] <= current_sim_date]["close"].min(),
},
index=[0],
),
],
)
self.state.in_open_trade = False
self.state.last_api_call_datetime -= timedelta(seconds=60)
self.state.last_action = "SELL"
self.state.prevent_loss = False
self.state.trailing_sell = False
self.state.trailing_sell_immediate = False
self.state.tsl_triggered = False
if self.trailing_stop_loss:
self.state.tsl_pcnt = float(self.trailing_stop_loss)
if self.trailing_stop_loss_trigger:
self.state.tsl_trigger = float(self.trailing_stop_loss_trigger)
# adjust the next simulation buy with the current balance
self.state.last_buy_size += profit
self.state.tsl_max = False
self.state.action = "DONE"
if self.save_graphs:
tradinggraphs = TradingGraphs(_technical_analysis, self)
ts = datetime.now().timestamp()
filename = f"{self.market}_{self.print_granularity()}_sell_{str(ts)}.png"
# This allows graphs to be used in sim mode using the correct DF
if self.is_sim:
tradinggraphs.render_ema_and_macd(len(trading_dataCopy), "graphs/" + filename, True)
else:
tradinggraphs.render_ema_and_macd(len(trading_data), "graphs/" + filename, True)
if self.exitaftersell:
RichText.notify("Exit after sell! (\"exitaftersell\" is enabled)", self, "warning")
sys.exit(0)
self.state.last_df_index = str(self.df_last.index.format()[0])
if self.logbuysellinjson is True and self.state.action == "DONE" and len(self.trade_tracker) > 0:
_notify(self.trade_tracker.loc[len(self.trade_tracker) - 1].to_json())
if self.state.action == "DONE" and indicatorvalues != "" and not self.disabletelegram:
self.notify_telegram(indicatorvalues)
# summary at the end of the simulation
if self.is_sim and self.state.iterations == len(df):
self._simulation_summary()
self._simulation_save_orders()
if self.state.last_buy_size <= 0 and self.state.last_buy_price <= 0 and self.state.last_action != "BUY":
self.telegram_bot.add_info(
f'Current price: {str(self.price)}{trailing_action_logtext} | {str(round(((self.price-df["close"].max()) / df["close"].max())*100, 2))}% from DF HIGH',
round(self.price, 4),
str(round(df["close"].max(), 4)),
str(
round(
((self.price - df["close"].max()) / df["close"].max()) * 100,
2,
)
)
+ "%",
self.state.action,
)
if self.state.last_action == "BUY" and self.state.in_open_trade and last_api_call_datetime.seconds > 60:
# update margin for telegram bot
self.telegram_bot.add_margin(
str(_truncate(margin, 4) + "%") if self.state.in_open_trade is True else " ",
str(_truncate(profit, 2)) if self.state.in_open_trade is True else " ",
self.price,
change_pcnt_high,
self.state.action,
)
# Update the watchdog_ping
self.telegram_bot.update_watch_dog_ping()
# decrement ignored iteration
if self.is_sim and self.smart_switch:
self.state.iterations = self.state.iterations - 1
# if live but not websockets
if not self.disabletracker and self.is_live and not self.websocket_connection:
# update order tracker csv
if self.exchange == Exchange.BINANCE:
self.account.save_tracker_csv(self.market)
elif self.exchange == Exchange.COINBASE or self.exchange == Exchange.COINBASEPRO or self.exchange == Exchange.KUCOIN:
self.account.save_tracker_csv()
if self.is_sim:
if self.state.iterations < len(df):
if self.sim_speed in ["fast", "fast-sample"]:
# fast processing
list(map(self.s.cancel, self.s.queue))
self.s.enter(
0,
1,
self.execute_job,
(),
)
else:
# slow processing
list(map(self.s.cancel, self.s.queue))
self.s.enter(
1,
1,
self.execute_job,
(),
)
else:
list(map(self.s.cancel, self.s.queue))
if (
self.websocket_connection
and self.websocket_connection is not None
and (isinstance(self.websocket_connection.tickers, pd.DataFrame) and len(self.websocket_connection.tickers) == 1)
and (isinstance(self.websocket_connection.candles, pd.DataFrame) and len(self.websocket_connection.candles) == self.adjusttotalperiods)
):
# poll every 5 seconds (self.websocket_connection)
self.s.enter(
5,
1,
self.execute_job,
(),
)
else:
if self.websocket and not self.is_sim:
# poll every 15 seconds (waiting for self.websocket_connection)
self.s.enter(
15,
1,
self.execute_job,
(),
)
else:
# poll every 1 minute (no self.websocket_connection)
self.s.enter(
60,
1,
self.execute_job,
(),
)
def run(self):
try:
message = "Starting "
if self.exchange == Exchange.COINBASE:
message += "Coinbase bot"
if self.websocket and not self.is_sim:
RichText.notify("Opening websocket to Coinbase", self, "normal")
print("")
self.websocket_connection = CWebSocketClient([self.market], self.granularity, app=self)
self.websocket_connection.start()
elif self.exchange == Exchange.COINBASEPRO:
message += "Coinbase Pro bot"
if self.websocket and not self.is_sim:
RichText.notify("Opening websocket to Coinbase Pro", self, "normal")
print("")
self.websocket_connection = CWebSocketClient([self.market], self.granularity, app=self)
self.websocket_connection.start()
elif self.exchange == Exchange.BINANCE:
message += "Binance bot"
if self.websocket and not self.is_sim:
RichText.notify("Opening websocket to Binance", self, "normal")
print("")
self.websocket_connection = BWebSocketClient([self.market], self.granularity, app=self)
self.websocket_connection.start()
elif self.exchange == Exchange.KUCOIN:
message += "Kucoin bot"
if self.websocket and not self.is_sim:
RichText.notify("Opening websocket to Kucoin", self, "normal")
print("")
self.websocket_connection = KWebSocketClient([self.market], self.granularit, app=self)
self.websocket_connection.start()
smartswitchstatus = "enabled" if self.smart_switch else "disabled"
message += f" for {self.market} using granularity {self.print_granularity()}. Smartswitch {smartswitchstatus}"
if self.startmethod in ("standard", "telegram") and not self.disabletelegram:
self.notify_telegram(message)
# initialise and start application
self.initialise()
if self.is_sim and self.simenddate:
try:
# if simenddate is set, then remove trailing data points
self.trading_data = self.trading_data[self.trading_data["date"] <= self.simenddate]
except Exception:
pass
try:
self.execute_job()
self.s.run()
except (KeyboardInterrupt, SystemExit):
raise
except (BaseException, Exception) as e: # pylint: disable=broad-except
if self.autorestart:
# Wait 30 second and try to relaunch application
seconds = 30
RichText.notify(f"Restarting application in {seconds} seconds after exception: {repr(e)}", self, "critical")
time.sleep(seconds)
if not self.disabletelegram:
self.notify_telegram(f"Auto restarting bot for {self.market} after exception: {repr(e)}")
# Cancel the events queue
map(self.s.cancel, self.s.queue)
# Restart the app
self.execute_job()
self.s.run()
else:
raise
# catches a keyboard break of app, exits gracefully
except (KeyboardInterrupt, SystemExit):
if self.websocket and not self.is_sim:
signal.signal(signal.SIGINT, signal_handler) # disable ctrl/cmd+c
RichText.notify("Shutting down bot...", self, "warning")
RichText.notify("Please wait while threads complete gracefully....", self, "warning")
# else:
# RichText.notify("Shutting down bot...", self, "warning")
try:
try:
self.telegram_bot.remove_active_bot()
except Exception:
pass
if self.websocket and self.websocket_connection and not self.is_sim:
try:
self.websocket_connection.close()
except Exception:
pass
sys.exit(0)
except SystemExit:
# pylint: disable=protected-access
os._exit(0)
except (BaseException, Exception) as e: # pylint: disable=broad-except
# catch all not managed exceptions and send a Telegram message if configured
if not self.disabletelegramerrormsgs:
self.notify_telegram(f"Bot for {self.market} got an exception: {repr(e)}")
try:
self.telegram_bot.remove_active_bot()
except Exception:
pass
RichText.notify(repr(e), self, "critical")
# pylint: disable=protected-access
os._exit(0)
# raise
def market_buy(self, market, quote_currency, buy_percent=100):
if self.is_live is True:
if isinstance(buy_percent, int):
if buy_percent > 0 and buy_percent < 100:
quote_currency = (buy_percent / 100) * quote_currency
if self.exchange == Exchange.COINBASE:
api = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self)
return api.market_buy(market, float(_truncate(quote_currency, 8)))
elif self.exchange == Exchange.COINBASEPRO:
api = CAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, app=self)
return api.market_buy(market, float(_truncate(quote_currency, 8)))
elif self.exchange == Exchange.KUCOIN:
api = KAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, use_cache=self.usekucoincache, app=self)
return api.market_buy(market, (float(quote_currency) - (float(quote_currency) * api.get_maker_fee())))
elif self.exchange == Exchange.BINANCE:
api = BAuthAPI(self.api_key, self.api_secret, self.api_url, recv_window=self.recv_window, app=self)
return api.market_buy(market, quote_currency)
else:
return None
def market_sell(self, market, base_currency, sell_percent=100):
if self.is_live is True:
if isinstance(sell_percent, int):
if sell_percent > 0 and sell_percent < 100:
base_currency = (sell_percent / 100) * base_currency
if self.exchange == Exchange.COINBASE:
api = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self)
return api.market_sell(market, base_currency)
elif self.exchange == Exchange.COINBASEPRO:
api = CAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, app=self)
return api.market_sell(market, base_currency)
elif self.exchange == Exchange.BINANCE:
api = BAuthAPI(self.api_key, self.api_secret, self.api_url, recv_window=self.recv_window, app=self)
return api.market_sell(market, base_currency, use_fees=self.use_sell_fee)
elif self.exchange == Exchange.KUCOIN:
api = KAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, use_cache=self.usekucoincache, app=self)
return api.market_sell(market, base_currency)
else:
return None
def notify_telegram(self, msg: str) -> None:
"""
Send a given message to preconfigured Telegram. If the telegram isn't enabled, e.g. via `--disabletelegram`,
this method does nothing and returns immediately.
"""
if self.disabletelegram or not self.telegram:
return
assert self._chat_client is not None
self._chat_client.send(msg)
def initialise(self, banner=True):
self.account = TradingAccount(self)
Stats(self, self.account).show()
self.state = AppState(self, self.account)
self.state.init_last_action()
if self.is_sim:
# initial amounts for sims
self.state.last_buy_size = 1000
self.state.first_buy_size = 1000
if banner and not self.is_sim or (self.is_sim and not self.simresultonly):
self._generate_banner()
self.app_started = True
# run the first job immediately after starting
if self.is_sim:
if self.sim_speed in ["fast-sample", "slow-sample"]:
attempts = 0
if self.simstartdate is not None and self.simenddate is not None:
start_date = self.get_date_from_iso8601_str(self.simstartdate)
if self.simenddate == "now":
end_date = self.get_date_from_iso8601_str(str(datetime.now()))
else:
end_date = self.get_date_from_iso8601_str(self.simenddate)
elif self.simstartdate is not None and self.simenddate is None:
start_date = self.get_date_from_iso8601_str(self.simstartdate)
end_date = start_date + timedelta(minutes=(self.granularity.to_integer / 60) * self.adjusttotalperiods)
elif self.simenddate is not None and self.simstartdate is None:
if self.simenddate == "now":
end_date = self.get_date_from_iso8601_str(str(datetime.now()))
else:
end_date = self.get_date_from_iso8601_str(self.simenddate)
start_date = end_date - timedelta(minutes=(self.granularity.to_integer / 60) * self.adjusttotalperiods)
else:
end_date = self.get_date_from_iso8601_str(str(pd.Series(datetime.now()).dt.round(freq="H")[0]))
if self.exchange == Exchange.COINBASE or self.exchange == Exchange.COINBASEPRO:
end_date -= timedelta(hours=random.randint(0, 8760 * 3)) # 3 years in hours
else:
end_date -= timedelta(hours=random.randint(0, 8760 * 1))
start_date = self.get_date_from_iso8601_str(str(end_date))
start_date -= timedelta(minutes=(self.granularity.to_integer / 60) * self.adjusttotalperiods)
while len(self.trading_data) < self.adjusttotalperiods and attempts < 10:
if end_date.isoformat() > datetime.now().isoformat():
end_date = datetime.now()
if self.smart_switch == 1:
trading_data = self.get_smart_switch_historical_data_chained(
self.market,
self.granularity,
str(start_date),
str(end_date),
)
else:
trading_data = self.get_smart_switch_df(
trading_data,
self.market,
self.granularity,
start_date.isoformat(),
end_date.isoformat(),
)
attempts += 1
if self.extra_candles_found:
self.simstartdate = str(start_date)
self.simenddate = str(end_date)
self.extra_candles_found = True
if len(self.trading_data) < self.adjusttotalperiods:
raise Exception(
f"Unable to retrieve {str(self.adjusttotalperiods)} random sets of data between {start_date} and {end_date} in 10 attempts."
)
if banner:
text_box = TextBox(80, 26)
start_date = str(start_date.isoformat())
end_date = str(end_date.isoformat())
text_box.line("Sampling start", str(start_date))
text_box.line("Sampling end", str(end_date))
if self.simstartdate is None and len(self.trading_data) < self.adjusttotalperiods:
text_box.center(f"WARNING: Using less than {str(self.adjusttotalperiods)} intervals")
text_box.line("Interval size", str(len(self.trading_data)))
text_box.doubleLine()
else:
start_date = self.get_date_from_iso8601_str(str(datetime.now()))
start_date -= timedelta(minutes=(self.granularity.to_integer / 60) * 2)
end_date = start_date
start_date = pd.Series(start_date).dt.round(freq="H")[0]
end_date = pd.Series(end_date).dt.round(freq="H")[0]
if self.is_sim and self.simstartdate:
start_date = self.simstartdate
else:
start_date -= timedelta(minutes=(self.granularity.to_integer / 60) * self.adjusttotalperiods)
if self.is_sim and self.simenddate:
end_date = self.simenddate
else:
if end_date.isoformat() > datetime.now().isoformat():
end_date = datetime.now()
if self.smart_switch == 1:
self.trading_data = self.get_smart_switch_historical_data_chained(
self.market,
self.granularity,
str(start_date),
str(end_date),
)
else:
self.trading_data = self.get_smart_switch_df(
self.trading_data,
self.market,
self.granularity,
self.get_date_from_iso8601_str(str(start_date)).isoformat(),
self.get_date_from_iso8601_str(str(end_date)).isoformat(),
)
def _simulation_summary(self) -> dict:
    """Build, render and return the summary of a completed simulation run.

    Returns a dict containing the bot config, buy/sell counts, first/last
    trade sizes and margins. Unless ``simresultonly`` is set it also renders
    a rich summary table to the terminal (and log console); unless
    ``disabletelegram`` is set it pushes Telegram notifications as well.
    """
    # Skeleton result; "data" entries are overwritten below as computed.
    simulation = {
        "config": {},
        "data": {
            "open_buy_excluded": 1,
            "buy_count": 0,
            "sell_count": 0,
            "first_trade": {"size": 0},
            "last_trade": {"size": 0},
            "margin": 0.0,
        },
        "exchange": str(self.exchange).replace("Exchange.", "").lower(),
    }

    if self.get_config() != "":
        simulation["config"] = self.get_config()

    # No buys at all: zero out the running totals before reporting.
    if self.state.buy_count == 0:
        self.state.last_buy_size = 0
        self.state.sell_sum = 0
    else:
        self.state.sell_sum = self.state.sell_sum + self.state.last_sell_size

    table = Table(title=f"Simulation Summary: {self.market}", box=box.SQUARE, min_width=40, border_style="white", show_header=False)
    table.add_column("Item", justify="right", style="white", no_wrap=True)
    table.add_column("Value", justify="left", style="cyan")

    # A trailing open buy has no matching sell yet, so it is excluded
    # from the count and margin statistics below.
    remove_last_buy = False
    if self.state.buy_count > self.state.sell_count:
        remove_last_buy = True
        self.state.buy_count -= 1  # remove last buy as there has not been a corresponding sell yet
        self.state.last_buy_size = self.state.previous_buy_size
        simulation["data"]["open_buy_excluded"] = 1

        if not self.simresultonly:
            table.add_row("Warning", Text("Simulation ended with an open trade and it will be excluded from the margin calculation.", style="orange1"))
            table.add_row("")
    else:
        simulation["data"]["open_buy_excluded"] = 0

    if remove_last_buy is True:
        if not self.simresultonly:
            table.add_row("Buy Count", Text(f"{str(self.state.buy_count)} (open buy order excluded)", "orange1"), style="white")
        else:
            simulation["data"]["buy_count"] = self.state.buy_count
    else:
        if not self.simresultonly:
            table.add_row("Buy Count", f"{str(self.state.buy_count)}", style="white")
        else:
            simulation["data"]["buy_count"] = self.state.buy_count

    if not self.simresultonly:
        table.add_row("Sell Count", str(self.state.sell_count), style="white")
        table.add_row("")
        table.add_row(f"First Buy Order ({self.quote_currency})", Text(str(self.state.first_buy_size), style="white"), style="white")
        table.add_row("")
        # Colour the last buy by how it compares to the last sell:
        # green = sold higher, orange = flat, red = sold lower.
        if self.state.last_sell_size > self.state.last_buy_size:
            table.add_row(f"Last Buy Order ({self.quote_currency})", Text(_truncate(self.state.last_buy_size, 4), style="bright_green"), style="white")
        elif self.state.last_buy_size == self.state.last_sell_size:
            table.add_row(f"Last Buy Order ({self.quote_currency})", Text(_truncate(self.state.last_buy_size, 4), style="orange1"), style="white")
        else:
            table.add_row(f"Last Buy Order ({self.quote_currency})", Text(_truncate(self.state.last_buy_size, 4), style="bright_red"), style="white")
    else:
        simulation["data"]["sell_count"] = self.state.sell_count
        simulation["data"]["first_trade"] = {}
        simulation["data"]["first_trade"]["size"] = self.state.first_buy_size

    if self.state.sell_count > 0:
        if not self.simresultonly:
            if self.state.last_sell_size > self.state.last_buy_size:
                table.add_row(
                    f"Last Sell Order ({self.quote_currency})", Text(_truncate(self.state.last_sell_size, 4), style="bright_green"), style="white"
                )
            elif self.state.last_buy_size == self.state.last_sell_size:
                table.add_row(f"Last Sell Order ({self.quote_currency})", Text(_truncate(self.state.last_sell_size, 4), style="orange1"), style="white")
            else:
                table.add_row(f"Last Sell Order ({self.quote_currency})", Text(_truncate(self.state.last_sell_size, 4), style="bright_red"), style="white")
        else:
            simulation["data"]["last_trade"] = {}
            simulation["data"]["last_trade"]["size"] = float(_truncate(self.state.last_sell_size, 2))
    else:
        # No sells occurred at all, so there is no margin to report.
        if not self.simresultonly:
            table.add_row("")
            table.add_row("Margin", "0.00% (margin is nil as a sell has not occurred during the simulation)")
            table.add_row("")
        else:
            simulation["data"]["margin"] = 0.0

        if not self.disabletelegram:
            self.notify_telegram(" Margin: 0.00%\n ** margin is nil as a sell has not occurred during the simulation\n")

    if not self.disabletelegram:
        self.notify_telegram(
            "Simulation Summary\n"
            + f" Market: {self.market}\n"
            + f" Buy Count: {self.state.buy_count}\n"
            + f" Sell Count: {self.state.sell_count}\n"
            + f" First Buy: {self.state.first_buy_size}\n"
            + f" Last Buy: {str(_truncate(self.state.last_buy_size, 4))}\n"
            + f" Last Sell: {str(_truncate(self.state.last_sell_size, 4))}\n"
        )

    if self.state.sell_count > 0:
        # Margin of the final (closed) trade, as a percentage.
        _last_trade_margin = float(
            _truncate(
                (((self.state.last_sell_size - self.state.last_buy_size) / self.state.last_buy_size) * 100),
                4,
            )
        )

        if not self.simresultonly:
            if _last_trade_margin > 0:
                table.add_row("Last Trade Margin", Text(f"{_last_trade_margin}%", style="bright_green"), style="white")
            elif _last_trade_margin < 0:
                table.add_row("Last Trade Margin", Text(f"{_last_trade_margin}%", style="bright_red"), style="white")
            else:
                table.add_row("Last Trade Margin", Text(f"{_last_trade_margin}%", style="orange1"), style="white")

            if remove_last_buy:
                table.add_row("")
                table.add_row(
                    "Open Trade Margin",
                    Text(f"{self.state.open_trade_margin} (open trade excluded from margin calculation)", style="orange1"),
                    style="white",
                )

            table.add_row("")
            table.add_row(f"Total Buy Volume ({self.quote_currency})", Text(_truncate(self.state.buy_tracker, 2), style="white"), style="white")
            table.add_row("")

            # Overall P/L and margin across all trades, coloured by sign.
            if self.state.profitlosstracker > 0:
                table.add_row(
                    f"All Trades Profit/Loss ({self.quote_currency})",
                    Text(f"{_truncate(self.state.profitlosstracker, 2)} ({_truncate(self.state.feetracker,2)} in fees)", style="bright_green"),
                    style="white",
                )
                table.add_row(
                    f"All Trades Margin ({self.quote_currency})",
                    Text(f"{_truncate(self.state.margintracker, 4)}% (non-live simulation, assuming highest fees)", style="bright_green"),
                    style="white",
                )
            elif self.state.profitlosstracker < 0:
                table.add_row(
                    f"All Trades Profit/Loss ({self.quote_currency})",
                    Text(f"{_truncate(self.state.profitlosstracker, 2)} ({_truncate(self.state.feetracker,2)} in fees)", style="bright_red"),
                    style="white",
                )
                table.add_row(
                    f"All Trades Margin ({self.quote_currency})",
                    Text(f"{_truncate(self.state.margintracker, 4)}% (non-live simulation, assuming highest fees)", style="bright_red"),
                    style="white",
                )
            else:
                table.add_row(
                    f"All Trades Profit/Loss ({self.quote_currency})",
                    Text(f"{_truncate(self.state.profitlosstracker, 2)} ({_truncate(self.state.feetracker,2)} in fees)", style="orange1"),
                    style="white",
                )
                table.add_row(
                    f"All Trades Margin ({self.quote_currency})",
                    Text(f"{_truncate(self.state.margintracker, 4)}% (non-live simulation, assuming highest fees)", style="orange1"),
                    style="white",
                )
        else:
            simulation["data"]["last_trade"]["margin"] = _last_trade_margin
            simulation["data"]["all_trades"] = {}
            simulation["data"]["all_trades"]["quote_currency"] = self.quote_currency
            simulation["data"]["all_trades"]["value_buys"] = float(_truncate(self.state.buy_tracker, 2))
            simulation["data"]["all_trades"]["profit_loss"] = float(_truncate(self.state.profitlosstracker, 2))
            simulation["data"]["all_trades"]["fees"] = float(_truncate(self.state.feetracker, 2))
            simulation["data"]["all_trades"]["margin"] = float(_truncate(self.state.margintracker, 4))
            simulation["data"]["all_trades"]["open_trade_margin"] = float(_truncate(self.state.open_trade_margin_float, 4))

        ## revised telegram Summary notification to give total margin in addition to last trade margin.
        if not self.disabletelegram:
            self.notify_telegram(f" Last Trade Margin: {_last_trade_margin}%\n\n")
        if remove_last_buy and not self.disabletelegram:
            self.notify_telegram(f"\nOpen Trade Margin at end of simulation: {self.state.open_trade_margin}\n")
        if not self.disabletelegram:
            self.notify_telegram(
                f" All Trades Margin: {_truncate(self.state.margintracker, 4)}%\n ** non-live simulation, assuming highest fees\n ** open trade excluded from margin calculation\n"
            )

    if not self.disabletelegram:
        self.telegram_bot.remove_active_bot()

    # Output: machine-readable JSON when simresultonly, otherwise the table.
    if self.simresultonly:
        print(json.dumps(simulation, sort_keys=True, indent=4))
    else:
        print("")  # blank line above table
        self.console_term.print(table)
        if self.disablelog is False:
            self.console_log.print(table)
        print("")  # blank line below table

    return simulation
def _simulation_save_orders(self) -> None:
    """Persist the simulation trade tracker to a CSV file under ./csv.

    The filename embeds the market, granularity and the first/last candle
    timestamps of the simulated data (colons replaced so the name is valid
    on Windows). No-op when the tracker is disabled.
    """
    if self.disabletracker:
        return

    start = str(self.trading_data.head(1).index.format()[0]).replace(":", ".")
    end = str(self.trading_data.tail(1).index.format()[0]).replace(":", ".")
    filename = f"{self.market} {str(self.granularity.to_integer)} {str(start)} - {str(end)}_{self.tradesfile}"

    try:
        if not os.path.isabs(filename):
            if not os.path.exists("csv"):
                os.makedirs("csv")
        self.trade_tracker.to_csv(os.path.join(os.curdir, "csv", filename))
    except OSError as err:
        # Fix: previously the exception was discarded and the literal
        # "(unknown)" was reported; include the actual reason instead.
        RichText.notify(f"Unable to save: {err}", "critical", self, "error")
def _generate_banner(self) -> None:
    """Render the startup banner: a table of every configurable bot option.

    Rows showing a non-default value are highlighted; rows at their default
    render grey. Options stored inverted (the ``disable*`` attributes) are
    displayed as their logical value via ``store_invert=True``.

    Requirements for bot options:
    - Update _generate_banner() in controllers/PyCryptoBot.py
    - Update the command line arguments below
    - Update the config parser in models/config/default_parser.py
    """

    def config_option_row_int(
        item: str = None, store_name: str = None, description: str = None, break_below: bool = False, default_value: int = 0, arg_name: str = None
    ) -> bool:
        """Add one table row for an integer option."""
        if item is None or store_name is None or description is None:
            return False
        if arg_name is None:
            arg_name = store_name

        if getattr(self, store_name) != default_value:
            table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <num>")
        else:
            table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <num>", style="grey62")

        if break_below is True:
            table.add_row("", "", "")
        return True

    def config_option_row_float(
        item: str = None, store_name: str = None, description: str = None, break_below: bool = False, default_value: float = 0, arg_name: str = None
    ) -> bool:
        """Add one table row for a float option."""
        if item is None or store_name is None or description is None:
            return False
        if arg_name is None:
            arg_name = store_name

        if getattr(self, store_name) != default_value:
            table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <num>")
        else:
            table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <num>", style="grey62")

        if break_below is True:
            table.add_row("", "", "")
        return True

    def config_option_row_bool(
        item: str = None,
        store_name: str = None,
        description: str = None,
        break_below: bool = False,
        store_invert: bool = False,
        default_value: bool = False,
        arg_name: str = None,
    ) -> bool:
        """Add one table row for a boolean option.

        With ``store_invert`` the attribute holds the negation of the
        displayed value (e.g. ``disabletelegram`` vs "Telegram enabled").
        """
        if item is None or store_name is None or description is None:
            return False
        if arg_name is None:
            arg_name = store_name

        # Fix: use equality instead of identity comparison. The previous
        # `not getattr(...) is not default_value` double negation relied on
        # `is` against True/False, which misclassifies 1/0 int config values.
        if store_invert is True:
            displayed = not getattr(self, store_name)
        else:
            displayed = getattr(self, store_name)

        if displayed != default_value:
            table.add_row(item, str(displayed), description, f"--{arg_name} <1|0>")
        else:
            table.add_row(item, str(displayed), description, f"--{arg_name} <1|0>", style="grey62")

        if break_below is True:
            table.add_row("", "", "")
        return True

    def config_option_row_str(
        item: str = None, store_name: str = None, description: str = None, break_below: bool = False, default_value: str = "", arg_name: str = None
    ) -> bool:
        """Add one table row for a string option; silently skips missing attributes."""
        if item is None or store_name is None or description is None:
            return False
        if arg_name is None:
            arg_name = store_name

        try:
            if getattr(self, store_name) != default_value:
                table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <str>")
            else:
                table.add_row(item, str(getattr(self, store_name)), description, f"--{arg_name} <str>", style="grey62")
        except AttributeError:
            pass  # option not present on this bot instance; omit the row

        if break_below is True:
            table.add_row("", "", "")
        return True

    def config_option_row_enum(
        item: str = None, store_name: str = None, description: str = None, break_below: bool = False, default_value: str = "", arg_name: str = None
    ) -> bool:
        """Add one table row for an enum option, displayed as its lowercase member name."""
        if item is None or store_name is None or description is None:
            return False
        if arg_name is None:
            arg_name = store_name

        if str(getattr(self, store_name)).replace(f"{item}.", "").lower() != default_value:
            table.add_row(item, str(getattr(self, store_name)).replace(f"{item}.", "").lower(), description, f"--{arg_name} <str>")
        else:
            table.add_row(item, str(getattr(self, store_name)).replace(f"{item}.", "").lower(), description, f"--{arg_name} <str>", style="grey62")

        if break_below is True:
            table.add_row("", "", "")
        return True

    table = Table(title=f"Python Crypto Bot {self.get_version_from_readme(self)}")
    table.add_column("Item", justify="right", style="cyan", no_wrap=True)
    table.add_column("Value", justify="left", style="green")
    table.add_column("Description", justify="left", style="magenta")
    table.add_column("Option", justify="left", style="white")

    table.add_row("Start", str(datetime.now()), "Bot start time")
    table.add_row("", "", "")

    # --- terminal / UI options ---
    config_option_row_bool(
        "Enable Terminal Color",
        "term_color",
        "Enable terminal UI color",
        store_invert=False,
        default_value=True,
        arg_name="termcolor",
    )
    # NOTE(review): default_value=self.term_width compares the value to itself,
    # so this row always renders grey — confirm this is intended.
    config_option_row_int("Terminal UI Width", "term_width", "Set terminal UI width", default_value=self.term_width, arg_name="termwidth")
    config_option_row_int("Terminal Log Width", "log_width", "Set terminal log width", break_below=True, default_value=180, arg_name="logwidth")

    # --- bot mode ---
    if self.is_live:
        table.add_row("Bot Mode", "LIVE", "Live trades using your funds!", "--live <1|0>")
    else:
        if self.is_sim:
            table.add_row("Bot Mode", "SIMULATION", "Back testing using simulations", "--sim <fast|slow>")
        else:
            table.add_row("Bot Mode", "TEST", "Test trades using dummy funds :)", "--live <1|0>")
    table.add_row("", "", "")

    config_option_row_enum("Exchange", "exchange", "Crypto currency exchange", default_value=None, arg_name="exchange")
    config_option_row_str(
        "Market", "market", "coinbase, coinbasepro and kucoin: BTC-GBP, binance: BTCGBP etc.", break_below=False, default_value=None, arg_name="market"
    )
    config_option_row_enum("Granularity", "granularity", "Granularity of the data", break_below=True, default_value="3600", arg_name="granularity")
    config_option_row_bool(
        "Enable Debugging",
        "debug",
        "Enable debug level logging",
        break_below=True,
        store_invert=False,
        default_value=False,
        arg_name="debug",
    )

    # --- simulation options ---
    config_option_row_str(
        "Sim Start Date",
        "simstartdate",
        "Start date for sample simulation e.g '2021-01-15'",
        break_below=False,
        default_value=None,
        arg_name="simstartdate",
    )
    config_option_row_str(
        "Sim End Date",
        "simenddate",
        "End date for sample simulation e.g '2021-01-15' or 'now'",
        break_below=False,
        default_value=None,
        arg_name="simenddate",
    )
    config_option_row_bool(
        "Sim Results Only",
        "simresultonly",
        "Simulation returns only the results",
        break_below=True,
        store_invert=False,
        default_value=False,
        arg_name="simresultonly",
    )

    # --- telegram options ---
    config_option_row_bool(
        "Telegram Notifications",
        "disabletelegram",
        "Enable Telegram notification messages",
        store_invert=True,
        default_value=False,
        arg_name="telegram",
    )
    config_option_row_bool(
        "Telegram Trades Only",
        "telegramtradesonly",
        "Telegram trades notifications only",
        store_invert=False,
        default_value=False,
        arg_name="telegramtradesonly",  # fix: displayed arg had a trailing "s" typo
    )
    config_option_row_bool(
        "Telegram Error Messages",
        "disabletelegramerrormsgs",
        "Telegram error message notifications",
        break_below=False,
        store_invert=True,
        default_value=False,
        arg_name="telegramerrormsgs",
    )
    config_option_row_bool(
        "Telegram Bot Control",
        "telegrambotcontrol",
        "Control your bot(s) with Telegram",
        break_below=True,
        store_invert=False,
        default_value=False,
        arg_name="telegrambotcontrol",
    )

    # --- file locations ---
    config_option_row_str(
        "Config File", "config_file", "Use the config file at the given location", break_below=False, default_value="config.json", arg_name="configfile"
    )
    config_option_row_str(
        "API Key File", "api_key_file", "Use the API key file at the given location", break_below=False, default_value=None, arg_name="api_key_file"
    )
    config_option_row_str(
        "Log File", "logfile", "Use the log file at the given location", break_below=False, default_value="pycryptobot.log", arg_name="logfile"
    )
    config_option_row_str(
        "Trades File",
        "tradesfile",
        "Use the simulation log trades at the given location",
        break_below=True,
        default_value="trades.csv",
        arg_name="tradesfile",
    )

    # --- general behaviour ---
    config_option_row_bool("Enable Log", "disablelog", "Enable console logging", store_invert=True, default_value=True, arg_name="log")
    config_option_row_bool(
        "Enable Smart Switching", "smart_switch", "Enable switching between intervals", store_invert=False, default_value=False, arg_name="smartswitch"
    )
    config_option_row_bool(
        "Enable Tracker", "disabletracker", "Enable trade order logging", store_invert=True, default_value=False, arg_name="tradetracker"
    )
    config_option_row_bool(
        "Auto Restart Bot", "autorestart", "Auto restart the bot in case of exception", store_invert=False, default_value=False, arg_name="autorestart"
    )
    config_option_row_bool(
        "Enable Websocket", "websocket", "Enable websockets for data retrieval", store_invert=False, default_value=False, arg_name="websocket"
    )
    config_option_row_bool(
        "Insufficient Funds Log",
        "enableinsufficientfundslogging",
        "Enable insufficient funds logging",
        store_invert=False,
        default_value=False,
        arg_name="insufficientfundslogging",
    )
    config_option_row_bool(
        "JSON Log Trade", "logbuysellinjson", "Log buy and sell orders in a JSON file", store_invert=False, default_value=False, arg_name="logbuysellinjson"
    )
    config_option_row_bool(
        "Manual Trading Only",
        "manual_trades_only",
        "Manual Trading Only (HODL)",
        break_below=False,
        store_invert=False,
        default_value=False,
        arg_name="manualtradesonly",
    )
    config_option_row_str(
        "Start Method",
        "startmethod",
        "Bot start method ('scanner', 'standard', 'telegram')",
        break_below=False,
        default_value="standard",
        arg_name="startmethod",
    )
    config_option_row_bool(
        "Save Trading Graphs",
        "save_graphs",
        "Save graph images of trades",
        break_below=False,
        store_invert=False,
        default_value=False,
        arg_name="graphs",
    )
    config_option_row_float(
        "Binance recvWindow",
        "recv_window",
        "Binance exchange API recvwindow, integer between 5000 and 60000",
        break_below=False,
        default_value=5000,
        arg_name="recvwindow",
    )
    config_option_row_bool(
        "Exit After Sell",
        "exitaftersell",
        "Exit the bot after a sell order",
        break_below=False,
        store_invert=False,
        default_value=False,
        arg_name="exitaftersell",
    )
    config_option_row_bool(
        "Ignore Previous Buy",
        "ignorepreviousbuy",
        "Ignore previous buy failure",
        break_below=False,
        store_invert=False,
        default_value=True,
        arg_name="ignorepreviousbuy",
    )
    config_option_row_bool(
        "Ignore Previous Sell",
        "ignoreprevioussell",
        "Ignore previous sell failure",
        break_below=True,
        store_invert=False,
        default_value=True,
        arg_name="ignoreprevioussell",
    )
    config_option_row_int(
        "Adjust Total Periods", "adjusttotalperiods", "Adjust data points in historical trading data", break_below=True, default_value=300
    )

    # --- sell options ---
    config_option_row_float("Sell Upper Percent", "sell_upper_pcnt", "Upper trade margin to sell", default_value=None, arg_name="sellupperpcnt")
    config_option_row_float("Sell Lower Percent", "sell_lower_pcnt", "Lower trade margin to sell", default_value=None, arg_name="selllowerpcnt")
    config_option_row_float(
        "No Sell Max", "nosellmaxpcnt", "Do not sell while trade margin is below this level", default_value=None, arg_name="nosellmaxpcnt"
    )
    config_option_row_float(
        "No Sell Min", "nosellminpcnt", "Do not sell while trade margin is above this level", break_below=True, default_value=None, arg_name="nosellminpcnt"
    )
    config_option_row_bool(
        "Prevent Loss", "preventloss", "Force a sell before margin is negative", store_invert=False, default_value=False, arg_name="preventloss"
    )
    config_option_row_float(
        "Prevent Loss Trigger", "preventlosstrigger", "Margin that will trigger the prevent loss", default_value=1.0, arg_name="preventlosstrigger"
    )
    config_option_row_float(
        "Prevent Loss Margin",
        "preventlossmargin",
        "Margin that will cause an immediate sell to prevent loss",
        default_value=0.1,
        arg_name="preventlossmargin",
    )
    config_option_row_bool(
        "Sell At Loss",
        "sellatloss",
        "Allow a sell if the profit margin is negative",
        break_below=True,
        store_invert=False,
        default_value=True,
        arg_name="sellatloss",
    )
    config_option_row_bool(
        "Buy Bull Only",
        "disablebullonly",
        "Only trade in a bull market SMA50 > SMA200",
        break_below=True,
        store_invert=True,
        default_value=False,
        arg_name="bullonly",
    )
    config_option_row_bool(
        "Sell At Resistance",
        "sellatresistance",
        "Sell if the price hits a resistance level",
        store_invert=False,
        default_value=False,
        arg_name="sellatresistance",
    )
    config_option_row_bool(
        "Sell At Fibonacci Low",
        "disablefailsafefibonaccilow",
        "Sell if the price hits a fibonacci lower level",
        store_invert=True,
        default_value=False,
        arg_name="sellatfibonaccilow",
    )
    config_option_row_bool(
        "Sell Candlestick Reversal",
        "disableprofitbankreversal",
        "Sell at candlestick strong reversal pattern",
        break_below=True,
        store_invert=True,
        default_value=False,
        arg_name="profitbankreversal",
    )

    # --- trailing stop loss / trailing sell ---
    config_option_row_float(
        "Trailing Stop Loss (TSL)", "trailing_stop_loss", "Percentage below the trade margin high to sell", default_value=0.0, arg_name="trailingstoploss"
    )
    config_option_row_float(
        "Trailing Stop Loss Trigger",
        "trailing_stop_loss_trigger",
        "Trade margin percentage to enable the trailing stop loss",
        default_value=0.0,
        arg_name="trailingstoplosstrigger",
    )
    config_option_row_float(
        "Trailing Sell Percent", "trailingsellpcnt", "Percentage of decrease to wait before selling", default_value=0.0, arg_name="trailingsellpcnt"
    )
    config_option_row_bool(
        "Immediate Trailing Sell",
        "trailingimmediatesell",
        "Immediate sell if trailing sell percent is reached",
        store_invert=False,
        default_value=False,
        arg_name="trailingimmediatesell",
    )
    config_option_row_float(
        "Immediate Trailing Sell Percent",
        "trailingsellimmediatepcnt",
        "Percentage of decrease used with a strong sell signal",
        default_value=0.0,
        arg_name="trailingsellimmediatepcnt",
    )
    config_option_row_float(
        "Trailing Sell Bailout Percent",
        "trailingsellbailoutpcnt",
        "Percentage of decrease to bailout, sell immediately",
        break_below=True,
        default_value=0.0,
        arg_name="trailingsellbailoutpcnt",
    )
    config_option_row_bool(
        "Dynamic Trailing Stop Loss (TSL)",
        "dynamic_tsl",
        "Dynamic Trailing Stop Loss (TSL)",
        store_invert=False,
        default_value=False,
        arg_name="dynamictsl",
    )
    config_option_row_float(
        "TSL Multiplier", "tsl_multiplier", "Please refer to the detailed explanation in the README.md", default_value=1.1, arg_name="tslmultiplier"
    )
    config_option_row_float(
        "TSL Trigger Multiplier",
        "tsl_trigger_multiplier",
        "Please refer to the detailed explanation in the README.md",
        default_value=1.1,
        arg_name="tsltriggermultiplier",
    )
    config_option_row_float(
        "TSL Max Percent",
        "tsl_max_pcnt",
        "Please refer to the detailed explanation in the README.md",
        break_below=True,
        default_value=-5.0,
        arg_name="tslmaxpcnt",
    )

    # --- buy options ---
    config_option_row_float("Buy Percent", "buypercent", "Buy order size in quote currency as a percentage", default_value=100.0, arg_name="buypercent")
    config_option_row_float("Sell Percent", "sellpercent", "Sell order size in quote currency as a percentage", default_value=100.0, arg_name="sellpercent")
    config_option_row_float("Buy Min Size", "buyminsize", "Minimum buy order size in quote currency", default_value=0.0, arg_name="buyminsize")
    config_option_row_float("Buy Max Size", "buymaxsize", "Maximum buy order size in quote currency", default_value=0.0, arg_name="buymaxsize")
    config_option_row_bool(
        "Buy Last Sell Size",
        "buylastsellsize",
        "Next buy order will match last sell order",
        store_invert=False,
        default_value=False,
        arg_name="buylastsellsize",
    )
    config_option_row_bool(
        "Multiple Buy Check",
        "marketmultibuycheck",
        "Additional check for market multiple buys",
        store_invert=False,
        default_value=False,
        arg_name="marketmultibuycheck",
    )
    config_option_row_bool(
        "Allow Buy Near High",
        "disablebuynearhigh",
        "Prevent the bot from buying at a recent high",
        store_invert=True,
        default_value=True,
        arg_name="buynearhigh",
    )
    config_option_row_float(
        "No Buy Near High Percent",
        "nobuynearhighpcnt",
        "Percentage from the range high to not buy",
        break_below=True,
        default_value=3.0,
        arg_name="buynearhighpcnt",
    )
    config_option_row_float(
        "Trailing Buy Percent",
        "trailingbuypcnt",
        "Percentage of increase to wait before buying",
        default_value=0.0,
        arg_name="trailingbuypcnt",
    )
    config_option_row_bool(
        "Immediate Trailing Buy",
        "trailingimmediatebuy",
        "Immediate buy if trailing buy percent is reached",
        store_invert=False,
        default_value=False,
        arg_name="trailingimmediatebuy",
    )
    config_option_row_float(
        "Immediate Trailing Buy Percent",
        "trailingbuyimmediatepcnt",
        "Percent of increase to trigger immediate buy",
        break_below=True,
        default_value=0.0,
        arg_name="trailingbuyimmediatepcnt",
    )
    config_option_row_bool("Override Sell Trigger", "selltriggeroverride", "Override sell trigger if strong buy", break_below=True, default_value=False)

    # --- technical indicator toggles ---
    config_option_row_bool("Use EMA12/26", "disablebuyema", "Exponential Moving Average (EMA)", store_invert=True, default_value=True, arg_name="ema1226")
    config_option_row_bool(
        "Use MACD/Signal", "disablebuymacd", "Moving Average Convergence Divergence (MACD)", store_invert=True, default_value=True, arg_name="macdsignal"
    )
    config_option_row_bool("On-Balance Volume (OBV)", "disablebuyobv", "On-Balance Volume (OBV)", store_invert=True, default_value=False, arg_name="obv")
    config_option_row_bool(
        "Use Elder-Ray", "disablebuyelderray", "Elder-Ray Index (Elder-Ray)", store_invert=True, default_value=False, arg_name="elderray"
    )
    config_option_row_bool(
        "Use Bollinger Bands", "disablebuybbands_s1", "Bollinger Bands - Strategy 1", store_invert=True, default_value=False, arg_name="bbands_s1"
    )
    config_option_row_bool(
        "Use Bollinger Bands",
        "disablebuybbands_s2",
        "Bollinger Bands - Strategy 2",
        break_below=True,
        store_invert=True,
        default_value=False,
        arg_name="bbands_s2",
    )

    self.console_term.print(table)
    if self.disablelog is False:
        self.console_log.print(table)
def get_date_from_iso8601_str(self, date: str):
    """Parse an ISO 8601-style timestamp string into a ``datetime``.

    Accepts "YYYY-MM-DD", "YYYY-MM-DD HH:MM:SS" or the "T"-separated form,
    with or without a fractional-seconds suffix (which is discarded).
    """
    # drop any fractional seconds (e.g. output of datetime.now())
    normalised = date.partition(".")[0]
    # tolerate the ISO "T" date/time separator
    normalised = normalised.replace("T", " ")
    # a bare date gets midnight appended so one format string fits all
    if len(normalised) == 10:
        normalised = f"{normalised} 00:00:00"
    return datetime.strptime(normalised, "%Y-%m-%d %H:%M:%S")
# getters
def get_market(self):
    """Return the market pair, normalising dashed pairs for Binance.

    Binance uses concatenated pairs (e.g. "BTCGBP"); when a dashed pair such
    as "BTC-GBP" was configured, split it, record the base/quote currencies
    and rewrite ``self.market`` to the concatenated form.
    """
    if self.exchange == Exchange.BINANCE and "-" in self.market:
        base, quote = self.market.split("-")[:2]
        self.base_currency = base
        self.quote_currency = quote
        self.market = base + quote
    return self.market
def print_granularity(self) -> str:
    """Return the candle granularity formatted as the active exchange expects.

    Raises:
        TypeError: if the exchange is not one of the supported values.
    """
    if self.exchange == Exchange.KUCOIN:
        return self.granularity.to_medium
    if self.exchange == Exchange.BINANCE:
        return self.granularity.to_short
    # coinbase, coinbasepro and dummy all use the raw number of seconds
    if self.exchange in (Exchange.COINBASE, Exchange.COINBASEPRO, Exchange.DUMMY):
        return str(self.granularity.to_integer)
    raise TypeError(f'Unknown exchange "{self.exchange.name}"')
def get_smart_switch_df(
    self,
    df: pd.DataFrame,
    market,
    granularity: Granularity,
    simstart: str = "",
    simend: str = "",
) -> pd.DataFrame:
    """Build (and cache) a historical candle DataFrame for simulations.

    Starting from ``simend``, pages backwards through the exchange's
    historical data until ``simstart`` is covered, then keeps fetching up to
    ``adjusttotalperiods`` extra candles before ``simstart`` so the
    simulation starts with the same warm-up window a live bot would have.
    Sets ``self.extra_candles_found`` / ``self.simstartdate`` as side
    effects. Only does work when ``self.is_sim`` is true.
    """

    def _notify(notification: str = "", level: str = "normal") -> None:
        # Render a one-line status message to the terminal (and log console)
        # in the bot's standard table format.
        if notification == "":
            return

        if level == "warning":
            color = "dark_orange"
        elif level == "error":
            color = "red1"
        elif level == "critical":
            color = "red1 blink"
        else:
            color = "violet"

        self.table_console = Table(title=None, box=None, show_header=False, show_footer=False)
        self.table_console.add_row(
            RichText.styled_text("Bot1", "magenta"),
            RichText.styled_text(datetime.today().strftime("%Y-%m-%d %H:%M:%S"), "white"),
            RichText.styled_text(self.market, "yellow"),
            RichText.styled_text(self.print_granularity(), "yellow"),
            RichText.styled_text(notification, color),
        )
        self.console_term.print(self.table_console)
        if self.disablelog is False:
            self.console_log.print(self.table_console)
        self.table_console = Table(title=None, box=None, show_header=False, show_footer=False)  # clear table

    if self.is_sim:
        df_first = None
        df_last = None

        # reuse the passed-in frame as the cache when it already has data
        result_df_cache = df

        simstart = self.get_date_from_iso8601_str(simstart)
        simend = self.get_date_from_iso8601_str(simend)

        try:
            # if df already has data get first and last record date
            if len(df) > 0:
                df_first = self.get_date_from_iso8601_str(str(df.head(1).index.format()[0]))
                df_last = self.get_date_from_iso8601_str(str(df.tail(1).index.format()[0]))
            else:
                result_df_cache = pd.DataFrame()
        except Exception:
            # if df = None create a new data frame
            result_df_cache = pd.DataFrame()

        if df_first is None and df_last is None:
            # cache is empty: fetch everything from the exchange
            if not self.is_sim or (self.is_sim and not self.simresultonly):
                if self.smart_switch:
                    _notify(f"Retrieving smart switch {granularity.to_short} market data from the exchange.")
                else:
                    _notify(f"Retrieving {granularity.to_short} market data from the exchange.")

            # first page: the 200 candles leading up to simend
            df_first = simend
            df_first -= timedelta(minutes=((granularity.to_integer / 60) * 200))
            df1 = self.get_historical_data(
                market,
                granularity,
                None,
                str(df_first.isoformat()),
                str(simend.isoformat()),
            )
            result_df_cache = df1
            originalSimStart = self.get_date_from_iso8601_str(str(simstart))
            adding_extra_candles = False
            # page backwards until simstart (and the extra warm-up window) is covered;
            # NOTE: simstart is moved earlier inside the loop once reached
            while df_first.isoformat(timespec="milliseconds") > simstart.isoformat(timespec="milliseconds") or df_first.isoformat(
                timespec="milliseconds"
            ) > originalSimStart.isoformat(timespec="milliseconds"):
                end_date = df_first
                df_first -= timedelta(minutes=(self.adjusttotalperiods * (granularity.to_integer / 60)))

                if df_first.isoformat(timespec="milliseconds") < simstart.isoformat(timespec="milliseconds"):
                    df_first = self.get_date_from_iso8601_str(str(simstart))

                df2 = self.get_historical_data(
                    market,
                    granularity,
                    None,
                    str(df_first.isoformat()),
                    str(end_date.isoformat()),
                )

                # check to see if there are an extra 300 candles available to be used, if not just use the original starting point
                if self.adjusttotalperiods >= 300 and adding_extra_candles is True and len(df2) <= 0:
                    self.extra_candles_found = False
                    simstart = originalSimStart
                else:
                    # prepend the older page and dedupe overlapping candles
                    result_df_cache = pd.concat([df2.copy(), df1.copy()]).drop_duplicates()
                    df1 = result_df_cache

                # create df with 300 candles or adjusted total periods before the required start_date to match live
                if df_first.isoformat(timespec="milliseconds") == simstart.isoformat(timespec="milliseconds"):
                    if adding_extra_candles is False:
                        simstart -= timedelta(minutes=(self.adjusttotalperiods * (granularity.to_integer / 60)))
                    adding_extra_candles = True
                    self.extra_candles_found = True

        # "morning_star" marks frames already run through technical analysis —
        # presumably those are already sorted; TODO confirm
        if len(result_df_cache) > 0 and "morning_star" not in result_df_cache:
            result_df_cache.sort_values(by=["date"], ascending=True, inplace=True)

        if self.smart_switch is False:
            if self.extra_candles_found is False:
                _notify(f"{str(self.exchange.value)} is not returning data for the requested start date.")
                _notify(f"Switching to earliest start date: {str(result_df_cache.head(1).index.format()[0])}.")
                self.simstartdate = str(result_df_cache.head(1).index.format()[0])

        return result_df_cache.copy()
def get_smart_switch_historical_data_chained(
    self,
    market,
    granularity: Granularity,
    start: str = "",
    end: str = "",
) -> pd.DataFrame:
    """Refresh (in sim mode) and return the cached candles for `granularity`.

    In simulation mode the EMA12/26 caches (15m/1h/6h, plus 5m when
    sell_smart_switch is enabled) are rebuilt for the requested window via
    get_smart_switch_df, and a warning is shown when the exchange could not
    supply the requested start date (simstartdate is moved forward to the
    earliest available candle). Returns the 5m, 15m or 1h cache matching
    `granularity`.

    Raises:
        Exception: when no 15m data is returned for the requested range.
    """
    if self.is_sim:
        if self.sell_smart_switch == 1:
            self.ema1226_5m_cache = self.get_smart_switch_df(self.ema1226_5m_cache, market, Granularity.FIVE_MINUTES, start, end)
        self.ema1226_15m_cache = self.get_smart_switch_df(self.ema1226_15m_cache, market, Granularity.FIFTEEN_MINUTES, start, end)
        self.ema1226_1h_cache = self.get_smart_switch_df(self.ema1226_1h_cache, market, Granularity.ONE_HOUR, start, end)
        self.ema1226_6h_cache = self.get_smart_switch_df(self.ema1226_6h_cache, market, Granularity.SIX_HOURS, start, end)

        if len(self.ema1226_15m_cache) == 0:
            raise Exception(f"No data return for selected date range {start} - {end}")

        if not self.extra_candles_found:
            # pick the cache matching the granularity being validated
            if granularity == Granularity.FIVE_MINUTES:
                cache = self.ema1226_5m_cache
            elif granularity == Granularity.FIFTEEN_MINUTES:
                cache = self.ema1226_15m_cache
            else:
                cache = self.ema1226_1h_cache
            if self.get_date_from_iso8601_str(str(cache.index.format()[0])).isoformat() != self.get_date_from_iso8601_str(start).isoformat():
                # BUGFIX: the 5m/15m variants of this message were missing the
                # space after the exchange name ("...value)}is not returning...")
                text_box = TextBox(80, 26)
                text_box.singleLine()
                text_box.center(f"{str(self.exchange.value)} is not returning data for the requested start date.")
                text_box.center(f"Switching to earliest start date: {str(cache.head(1).index.format()[0])}")
                text_box.singleLine()
                self.simstartdate = str(cache.head(1).index.format()[0])

    if granularity == Granularity.FIFTEEN_MINUTES:
        return self.ema1226_15m_cache
    elif granularity == Granularity.FIVE_MINUTES:
        return self.ema1226_5m_cache
    else:
        return self.ema1226_1h_cache
def get_historical_data_chained(self, market, granularity: Granularity, max_iterations: int = 1) -> pd.DataFrame:
    """Fetch candles for `market`, then page backwards in time.

    Performs up to `max_iterations` requests; each extra request covers a
    window ending just before the earliest candle already held.  Results are
    concatenated, deduplicated, and sorted by date ascending.
    """
    df1 = self.get_historical_data(market, granularity, None)

    if max_iterations == 1:
        return df1

    def get_previous_date_range(df: pd.DataFrame = None) -> tuple:
        # NOTE(review): granularity.to_integer is in seconds, so
        # seconds=(to_integer / 60) steps back only 1/60th of a candle —
        # looks like it should be minutes=(to_integer / 60) (cf. usage at the
        # smart-switch fetch) or seconds=to_integer; confirm.  In practice the
        # resulting overlap is removed by drop_duplicates below.
        end_date = df["date"].min() - timedelta(seconds=(granularity.to_integer / 60))
        # assumes adjusttotalperiods is expressed in hours here — TODO confirm
        new_start = df["date"].min() - timedelta(hours=self.adjusttotalperiods)
        return (str(new_start).replace(" ", "T"), str(end_date).replace(" ", "T"))

    iterations = 0
    result_df = pd.DataFrame()
    while iterations < (max_iterations - 1):
        start_date, end_date = get_previous_date_range(df1)
        df2 = self.get_historical_data(market, granularity, None, start_date, end_date)
        # prepend the older window, dropping the overlap
        result_df = pd.concat([df2, df1]).drop_duplicates()
        df1 = result_df
        iterations = iterations + 1

    if "date" in result_df:
        result_df.sort_values(by=["date"], ascending=True, inplace=True)

    return result_df
def get_historical_data(
    self,
    market,
    granularity: Granularity,
    websocket,
    iso8601start="",
    iso8601end="",
):
    """Fetch candle data from the configured exchange's API.

    Defaults to Coinbase Pro when no known exchange is configured.  When both
    start and end are supplied (or start only, except on Binance) the websocket
    is bypassed and the date range is requested explicitly.
    """
    if self.exchange == Exchange.COINBASE:
        api = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self)
    elif self.exchange == Exchange.BINANCE:
        api = BPublicAPI(api_url=self.api_url, app=self)
    elif self.exchange == Exchange.KUCOIN:
        api = KPublicAPI(api_url=self.api_url, app=self)
        # Kucoin only returns 100 rows if start not specified, make sure we get the right amount
        if not self.is_sim and iso8601start == "":
            start = datetime.now() - timedelta(minutes=(granularity.to_integer / 60) * self.adjusttotalperiods)
            iso8601start = str(start.isoformat()).split(".")[0]
    else:
        # returns data from coinbase pro if not specified
        api = CPublicAPI(app=self)

    have_start = iso8601start != ""
    have_end = iso8601end != ""

    if have_start and have_end:
        return api.get_historical_data(
            market,
            granularity,
            None,
            iso8601start,
            iso8601end,
        )
    if have_start and self.exchange != Exchange.BINANCE:
        return api.get_historical_data(
            market,
            granularity,
            None,
            iso8601start,
        )
    return api.get_historical_data(market, granularity, websocket)
def get_ticker(self, market, websocket):
    """Return the ticker for `market` from the configured exchange.

    Falls back to Coinbase Pro when no known exchange is configured.
    """
    if self.exchange == Exchange.COINBASE:
        api = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self)
    elif self.exchange == Exchange.BINANCE:
        api = BPublicAPI(api_url=self.api_url, app=self)
    elif self.exchange == Exchange.KUCOIN:
        api = KPublicAPI(api_url=self.api_url, app=self)
    else:
        # returns data from coinbase pro if not specified
        api = CPublicAPI(app=self)
    return api.get_ticker(market, websocket)
def get_time(self):
    """Return the exchange's server time, or "" when unavailable."""
    # Coinbase and Coinbase Pro share the same public time endpoint
    if self.exchange in (Exchange.COINBASE, Exchange.COINBASEPRO):
        return CPublicAPI(app=self).get_time()
    if self.exchange == Exchange.KUCOIN:
        return KPublicAPI(app=self).get_time()
    if self.exchange == Exchange.BINANCE:
        try:
            return BPublicAPI(app=self).get_time()
        except ReadTimeoutError:
            return ""
    return ""
def get_interval(self, df: pd.DataFrame = None, iterations: int = 0) -> pd.DataFrame:
    """Return the candle row to act on.

    In a simulation with iterations > 0 this is the row at position
    iterations-1 (so the sim steps through the data); otherwise it is the
    most recent row.  An empty (or omitted) DataFrame is returned as-is.
    """
    # BUGFIX: the default was a mutable `pd.DataFrame()` evaluated once at
    # definition time and shared across calls; use None as the sentinel
    if df is None:
        df = pd.DataFrame()
    if len(df) == 0:
        return df
    if self.is_sim and iterations > 0:
        # with a simulation iterate through data
        return df.iloc[iterations - 1 : iterations]
    # most recent entry
    return df.tail(1)
def is_1h_ema1226_bull(self, iso8601end: str = ""):
    """Return True when EMA12 is above EMA26 on the latest 1-hour candle.

    In simulations reads the 1h cache up to iso8601end; live fetches fresh
    1h candles (and refreshes the cache).  Best-effort: any error yields False.
    """
    try:
        if self.is_sim and isinstance(self.ema1226_1h_cache, pd.DataFrame):
            candles = self.ema1226_1h_cache.loc[self.ema1226_1h_cache["date"] <= iso8601end].copy()
        elif self.exchange != Exchange.DUMMY:
            candles = self.get_additional_df("1h", self.websocket_connection).copy()
            self.ema1226_1h_cache = candles
        else:
            return False
        analysis = TechnicalAnalysis(candles, app=self)
        # only compute the EMAs that are not already present
        for period in (12, 26):
            if f"ema{period}" not in candles:
                analysis.add_ema(period)
        latest = analysis.get_df().copy().iloc[-1, :]
        latest["bull"] = latest["ema12"] > latest["ema26"]
        return bool(latest["bull"])
    except Exception:
        return False
def is_6h_ema1226_bull(self, iso8601end: str = ""):
    """Return True when EMA12 is above EMA26 on the latest 6-hour candle.

    In simulations reads the 6h cache up to iso8601end; live fetches fresh
    6h candles (and refreshes the cache).  Best-effort: any error yields False.
    """
    try:
        # BUGFIX: the isinstance check previously tested ema1226_1h_cache
        # (copy-paste from is_1h_ema1226_bull), so the simulation branch was
        # gated on the wrong cache attribute
        if self.is_sim and isinstance(self.ema1226_6h_cache, pd.DataFrame):
            df_data = self.ema1226_6h_cache.loc[self.ema1226_6h_cache["date"] <= iso8601end].copy()
        elif self.exchange != Exchange.DUMMY:
            df_data = self.get_additional_df("6h", self.websocket_connection).copy()
            self.ema1226_6h_cache = df_data
        else:
            return False
        ta = TechnicalAnalysis(df_data, app=self)
        if "ema12" not in df_data:
            ta.add_ema(12)
        if "ema26" not in df_data:
            ta.add_ema(26)
        df_last = ta.get_df().copy().iloc[-1, :]
        df_last["bull"] = df_last["ema12"] > df_last["ema26"]
        return bool(df_last["bull"])
    except Exception:
        return False
def is_1h_sma50200_bull(self, iso8601end: str = ""):
    """Return True when SMA50 is above SMA200 on the latest 1-hour candle.

    Requires at least 200 adjusted periods; best-effort (errors yield False).
    """
    # if periods adjusted and less than 200
    if self.adjusttotalperiods < 200:
        return False
    try:
        if self.is_sim and isinstance(self.sma50200_1h_cache, pd.DataFrame):
            candles = self.sma50200_1h_cache.loc[self.sma50200_1h_cache["date"] <= iso8601end].copy()
        elif self.exchange != Exchange.DUMMY:
            candles = self.get_additional_df("1h", self.websocket_connection).copy()
            self.sma50200_1h_cache = candles
        else:
            return False
        analysis = TechnicalAnalysis(candles, app=self)
        # only compute the SMAs that are not already present
        for period in (50, 200):
            if f"sma{period}" not in candles:
                analysis.add_sma(period)
        latest = analysis.get_df().copy().iloc[-1, :]
        latest["bull"] = latest["sma50"] > latest["sma200"]
        return bool(latest["bull"])
    except Exception:
        return False
def get_additional_df(self, short_granularity, websocket) -> pd.DataFrame:
    """Return (and cache in self.df_data) candles for an additional granularity.

    self.df_data holds one slot per granularity; each slot is a list
    [short_granularity, granularity, row, DataFrame].  `row` is -1 after a
    fresh fetch and -2 once a synthetic ticker row has been appended to the
    DataFrame.  Between candle closes, the latest ticker price is folded into
    the last row instead of re-querying the exchange.
    """
    granularity = Granularity.convert_to_enum(short_granularity)

    # locate this granularity's slot; next_idx tracks the first free slot
    idx, next_idx = (None, 0)
    for i in range(len(self.df_data)):
        if isinstance(self.df_data[i], list) and self.df_data[i][0] == short_granularity:
            idx = i
        elif isinstance(self.df_data[i], list):
            next_idx = i + 1
        else:
            break

    # idx list:
    # 0 = short_granularity (1h, 6h, 1d, 5m, 15m, etc.)
    # 1 = granularity (ONE_HOUR, SIX_HOURS, FIFTEEN_MINUTES, etc.)
    # 2 = df row (for last candle date)
    # 3 = DataFrame
    if idx is None:
        # first request for this granularity: initialise an empty slot
        idx = next_idx
        self.df_data[idx] = [short_granularity, granularity, -1, pd.DataFrame()]

    df = self.df_data[idx][3]
    row = self.df_data[idx][2]

    try:
        if len(df) == 0 or (  # empty dataframe
            len(df) > 0
            and (  # if exists, only refresh at candleclose
                datetime.timestamp(datetime.utcnow()) - granularity.to_integer >= datetime.timestamp(df["date"].iloc[row])
            )
        ):
            # NOTE(review): fetches with self.granularity rather than the
            # requested `granularity` — confirm this is intentional
            df = self.get_historical_data(self.market, self.granularity, self.websocket_connection)
            row = -1
        else:
            # if ticker hasn't run yet or hasn't updated, return the original df
            if websocket is not None and self.ticker_date is None:
                return df
            elif self.ticker_date is None or datetime.timestamp(  # if calling API multiple times, per iteration, ticker may not be updated yet
                datetime.utcnow()
            ) - 60 <= datetime.timestamp(df["date"].iloc[row]):
                return df
            elif row == -2:  # update the new row added for ticker if it is there
                # widen low/high to include the ticker price, move close/date forward
                df.iloc[-1, df.columns.get_loc("low")] = self.ticker_price if self.ticker_price < df["low"].iloc[-1] else df["low"].iloc[-1]
                df.iloc[-1, df.columns.get_loc("high")] = self.ticker_price if self.ticker_price > df["high"].iloc[-1] else df["high"].iloc[-1]
                df.iloc[-1, df.columns.get_loc("close")] = self.ticker_price
                df.iloc[-1, df.columns.get_loc("date")] = datetime.strptime(self.ticker_date, "%Y-%m-%d %H:%M:%S")
                tsidx = pd.DatetimeIndex(df["date"])
                df.set_index(tsidx, inplace=True)
                df.index.name = "ts"
            else:  # else we are adding a new row for the ticker data
                # NOTE(review): the data order below puts the previous close in
                # the "close" position and the ticker price in "low" — mapping
                # against the column list looks shifted; confirm intent
                new_row = pd.DataFrame(
                    columns=[
                        "date",
                        "market",
                        "granularity",
                        "open",
                        "high",
                        "close",
                        "low",
                        "volume",
                    ],
                    data=[
                        [
                            datetime.strptime(self.ticker_date, "%Y-%m-%d %H:%M:%S"),
                            df["market"].iloc[-1],
                            df["granularity"].iloc[-1],
                            (self.ticker_price if self.ticker_price < df["close"].iloc[-1] else df["close"].iloc[-1]),
                            (self.ticker_price if self.ticker_price > df["close"].iloc[-1] else df["close"].iloc[-1]),
                            df["close"].iloc[-1],
                            self.ticker_price,
                            df["volume"].iloc[-1],
                        ]
                    ],
                )
                df = pd.concat([df, new_row], ignore_index=True)
                tsidx = pd.DatetimeIndex(df["date"])
                df.set_index(tsidx, inplace=True)
                df.index.name = "ts"
                row = -2

        # persist the refreshed candles and row marker back into the cache slot
        self.df_data[idx][3] = df
        self.df_data[idx][2] = row
        return df
    except Exception as err:
        raise Exception(f"Additional DF Error: {err}")
def get_last_buy(self) -> dict:
    """Retrieves the last exchange buy order and returns a dictionary"""
    if not self.is_live:
        # not live, return None
        return None

    def _order_summary(last_order, fee_key, fee_value) -> dict:
        # common shape of the returned order; only the fee entry differs per exchange
        return {
            "side": "buy",
            "market": self.market,
            "size": float(last_order["size"]),
            "filled": float(last_order["filled"]),
            "price": float(last_order["price"]),
            fee_key: fee_value,
            "date": str(pd.DatetimeIndex(pd.to_datetime(last_order["created_at"]).dt.strftime("%Y-%m-%dT%H:%M:%S.%Z"))[0]),
        }

    try:
        if self.exchange == Exchange.COINBASE:
            api = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self)
            orders = api.get_orders(self.market, "", "done")
        elif self.exchange == Exchange.COINBASEPRO:
            api = CAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, app=self)
            orders = api.get_orders(self.market, "", "done")
        elif self.exchange == Exchange.KUCOIN:
            api = KAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, use_cache=self.usekucoincache, app=self)
            orders = api.get_orders(self.market, "", "done")
        elif self.exchange == Exchange.BINANCE:
            api = BAuthAPI(self.api_key, self.api_secret, self.api_url, recv_window=self.recv_window, app=self)
            orders = api.get_orders(self.market)
        else:
            return None

        if len(orders) == 0:
            return None
        last_order = orders.tail(1)
        if last_order["action"].values[0] != "buy":
            return None

        if self.exchange == Exchange.BINANCE:
            # Binance reports an estimated 0.1% fee under the "fees" key
            return _order_summary(last_order, "fees", float(last_order["size"] * 0.001))
        return _order_summary(last_order, "fee", float(last_order["fees"]))
    except Exception:
        return None
def get_taker_fee(self):
    """Return the taker fee rate for the active exchange.

    Simulations use each exchange's default lowest fee tier; live mode
    queries the exchange once and caches the result on self.takerfee.
    """
    if not self.is_live:
        # simulation/test defaults: lowest advertised fee tier per exchange
        if self.exchange == Exchange.COINBASE:
            return 0.006
        if self.exchange == Exchange.COINBASEPRO:
            return 0.005
        if self.exchange == Exchange.BINANCE:
            # https://www.binance.com/en/support/announcement/binance-launches-zero-fee-bitcoin-trading-10435147c55d4a40b64fcbf43cb46329
            # UPDATE: https://www.binance.com/en/support/announcement/updates-on-zero-fee-bitcoin-trading-busd-zero-maker-fee-promotion-be13a645cca643d28eab5b9b34f2dc36
            # no fees for those pairs
            return 0.0 if self.get_market() in ["BTCTUSD"] else 0.001
        if self.exchange == Exchange.KUCOIN:
            return 0.0015

    if self.takerfee > -1.0:
        # already fetched and cached
        return self.takerfee

    if self.exchange == Exchange.COINBASE:
        self.takerfee = CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self).get_taker_fee()
    elif self.exchange == Exchange.COINBASEPRO:
        self.takerfee = CAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, app=self).get_taker_fee()
    elif self.exchange == Exchange.BINANCE:
        self.takerfee = BAuthAPI(self.api_key, self.api_secret, self.api_url, recv_window=self.recv_window, app=self).get_taker_fee(self.get_market())
    elif self.exchange == Exchange.KUCOIN:
        self.takerfee = KAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, use_cache=self.usekucoincache, app=self).get_taker_fee()
    else:
        return 0.005
    return self.takerfee
def get_maker_fee(self):
    """Return the maker fee rate for the active exchange.

    Simulations use each exchange's default lowest fee tier; live mode
    returns self.makerfee when already set, otherwise queries the exchange
    (without caching the result).
    """
    if not self.is_live:
        # simulation/test defaults: lowest advertised fee tier per exchange
        sim_defaults = {
            Exchange.COINBASE: 0.004,
            Exchange.COINBASEPRO: 0.005,
            Exchange.BINANCE: 0.0,
            Exchange.KUCOIN: 0.0015,
        }
        if self.exchange in sim_defaults:
            return sim_defaults[self.exchange]

    if self.makerfee > -1.0:
        return self.makerfee

    if self.exchange == Exchange.COINBASE:
        return CBAuthAPI(self.api_key, self.api_secret, self.api_url, app=self).get_maker_fee()
    if self.exchange == Exchange.COINBASEPRO:
        return CAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, app=self).get_maker_fee()
    if self.exchange == Exchange.BINANCE:
        return BAuthAPI(self.api_key, self.api_secret, self.api_url, recv_window=self.recv_window, app=self).get_maker_fee(self.get_market())
    if self.exchange == Exchange.KUCOIN:
        return KAuthAPI(self.api_key, self.api_secret, self.api_passphrase, self.api_url, use_cache=self.usekucoincache, app=self).get_maker_fee()
    return 0.005
def get_buy_percent(self):
    """Return the configured buy percentage (100 when unset or invalid)."""
    try:
        percent = int(self.buypercent)
    except Exception:  # pylint: disable=broad-except
        percent = 100
    return percent
def get_sell_percent(self):
    """Return the configured sell percentage (100 when unset or invalid)."""
    try:
        percent = int(self.sellpercent)
    except Exception:  # pylint: disable=broad-except
        percent = 100
    return percent
def get_config(self) -> dict:
    """Return the "config" section for the active exchange from the config file.

    Returns an empty dict when the file is missing/unreadable or when the
    exchange (or its "config" key) is absent.  JSON decode errors still
    propagate, matching the original behaviour.
    """
    try:
        # BUGFIX: was `json.loads(open(...).read())`, which leaked the file
        # handle; a context manager guarantees it is closed
        with open(self.config_file, "r", encoding="utf8") as fh:
            config = json.loads(fh.read())
    except IOError:
        return {}
    if self.exchange.value in config and "config" in config[self.exchange.value]:
        return config[self.exchange.value]["config"]
    return {}
class Granularity(Enum):
    """Candle granularity with its per-exchange/pandas representations.

    Each member's value is the tuple (seconds, short, medium, pandas frequency),
    e.g. ONE_HOUR == (3600, "1h", "1hour", "1H").
    """

    ONE_MINUTE = 60, "1m", "1min", "1T"
    FIVE_MINUTES = 300, "5m", "5min", "5T"
    FIFTEEN_MINUTES = 900, "15m", "15min", "15T"
    THIRTY_MINUTES = 1800, "30m", "30min", "30T"
    ONE_HOUR = 3600, "1h", "1hour", "1H"
    SIX_HOURS = 21600, "6h", "6hour", "6H"
    ONE_DAY = 86400, "1d", "1day", "1D"

    def __init__(self, integer, short, medium, frequency):
        # unpack the value tuple onto named attributes
        self.integer = integer
        self.short = short
        self.medium = medium
        self.frequency = frequency

    @staticmethod
    def convert_to_enum(value):
        """Map any known representation (e.g. 3600, "1h", "1hour", "1H") to its member."""
        for granularity in Granularity:
            for enum_value in granularity.value:
                if enum_value == value:
                    return granularity
        raise ValueError("Invalid Granularity")

    # BUGFIX: these accessors were plain methods, but callers use them as
    # attributes (e.g. `granularity.to_integer / 60`), which raised TypeError;
    # they must be properties
    @property
    def to_short(self):
        return self.short

    @property
    def to_integer(self):
        return self.integer

    @property
    def to_medium(self):
        return self.medium

    @property
    def get_frequency(self):
        return self.frequency
def _apply_scanner_config(app, exchange_config) -> None:
    """Copy the shared screener thresholds from an exchange's config block onto the app."""
    app.scanner_quote_currencies = exchange_config.get("quote_currency", ["USDT"])
    app.adx_threshold = exchange_config.get("adx_threshold", 25)
    app.volatility_threshold = exchange_config.get("volatility_threshold", 9)
    app.minimum_volatility = exchange_config.get("minimum_volatility", 5)
    app.minimum_volume = exchange_config.get("minimum_volume", 20000)
    app.volume_threshold = exchange_config.get("volume_threshold", 20000)
    app.minimum_quote_price = exchange_config.get("minimum_quote_price", 0.0000001)
    app.selection_score = exchange_config.get("selection_score", 10)
    app.tv_screener_ratings = [rating.upper() for rating in exchange_config.get("tv_screener_ratings", ["STRONG_BUY"])]


def load_configs():
    """Build one configured PyCryptoBot app per exchange in screener.json.

    screener.json supplies the per-exchange screener thresholds; config.json
    supplies API credentials/URLs.  A missing screener.json is fatal; a
    missing config.json is only printed (matching previous behaviour).
    Returns the list of configured apps.
    """
    exchanges_loaded = []
    try:
        with open("screener.json", encoding="utf8") as json_file:
            config = json.load(json_file)
    except IOError as err:
        raise err
    try:
        with open("config.json", encoding="utf8") as json_file:
            bot_config = json.load(json_file)
    except IOError as err:
        print(err)
    try:
        for exchange in config:
            ex = CryptoExchange(exchange)
            exchange_config = config[ex.value]
            if ex not in (CryptoExchange.BINANCE, CryptoExchange.COINBASE, CryptoExchange.COINBASEPRO, CryptoExchange.KUCOIN):
                raise ValueError(f"Invalid exchange found in config: {ex}")
            app = PyCryptoBot(exchange=ex)
            # only the public API client and the granularity encoding
            # (string vs integer seconds) differ per exchange
            if ex == CryptoExchange.BINANCE:
                app.public_api = BPublicAPI(bot_config[ex.value]["api_url"])
                app.granularity = Granularity(Granularity.convert_to_enum(exchange_config.get("granularity", "1h")))
            elif ex == CryptoExchange.COINBASE:
                app.public_api = CBAuthAPI(bot_config[ex.value]["api_key"], bot_config[ex.value]["api_secret"], bot_config[ex.value]["api_url"])
                app.granularity = Granularity(Granularity.convert_to_enum(int(exchange_config.get("granularity", "3600"))))
            elif ex == CryptoExchange.COINBASEPRO:
                app.public_api = CPublicAPI()
                app.granularity = Granularity(Granularity.convert_to_enum(int(exchange_config.get("granularity", "3600"))))
            else:  # CryptoExchange.KUCOIN
                app.public_api = KPublicAPI(bot_config[ex.value]["api_url"])
                app.granularity = Granularity(Granularity.convert_to_enum(exchange_config.get("granularity", "1h")))
            _apply_scanner_config(app, exchange_config)
            exchanges_loaded.append(app)
    except AttributeError as e:
        print(f"Invalid exchange: {e}...ignoring.")
    return exchanges_loaded
168,152 | import time
import json
import pandas as pd
import re
import sys
from datetime import datetime
from decimal import Decimal
from itertools import islice
from tradingview_ta import *
from importlib.metadata import version
from controllers.PyCryptoBot import PyCryptoBot
from models.helper.TelegramBotHelper import TelegramBotHelper as TGBot
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase import AuthAPI as CBAuthAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
from models.exchange.kucoin import PublicAPI as KPublicAPI
from models.exchange.Granularity import Granularity
from models.exchange.ExchangesEnum import Exchange as CryptoExchange
def get_markets(app, quote_currency):
    """Return every market symbol on the app's exchange quoted in quote_currency."""
    quote_currency = quote_currency.upper()
    api = app.public_api
    resp = api.get_markets_24hr_stats()

    markets = []
    if app.exchange == CryptoExchange.BINANCE:
        # Binance symbols have no separator, e.g. "BTCUSDT"
        markets = [row["symbol"] for row in resp if row["symbol"].endswith(quote_currency)]
    elif app.exchange in (CryptoExchange.COINBASE, CryptoExchange.COINBASEPRO):
        # Coinbase (Pro) markets are "BASE-QUOTE" strings
        suffix = f"-{quote_currency}"
        markets = [str(market) for market in resp if str(market).endswith(suffix)]
    elif app.exchange == CryptoExchange.KUCOIN:
        suffix = f"-{quote_currency}"
        markets = [result["symbol"] for result in resp["data"]["ticker"] if result["symbol"].endswith(suffix)]
    return markets
168,153 | import time
import json
import pandas as pd
import re
import sys
from datetime import datetime
from decimal import Decimal
from itertools import islice
from tradingview_ta import *
from importlib.metadata import version
from controllers.PyCryptoBot import PyCryptoBot
from models.helper.TelegramBotHelper import TelegramBotHelper as TGBot
from models.exchange.binance import PublicAPI as BPublicAPI
from models.exchange.coinbase import AuthAPI as CBAuthAPI
from models.exchange.coinbase_pro import PublicAPI as CPublicAPI
from models.exchange.kucoin import PublicAPI as KPublicAPI
from models.exchange.Granularity import Granularity
from models.exchange.ExchangesEnum import Exchange as CryptoExchange
def volatility_calculator(bollinger_band_upper, bollinger_band_lower, keltner_upper, keltner_lower, high, low):
    """
    A break away from traditional volatility calculations: averages the
    proportionate price gaps of the Bollinger bands, Keltner channels and
    the high/low range.  Returns 0 when any input cannot become a Decimal.
    """
    try:
        spreads = (
            (Decimal(bollinger_band_upper) - Decimal(bollinger_band_lower), Decimal(bollinger_band_lower)),
            (Decimal(keltner_upper) - Decimal(keltner_lower), Decimal(keltner_lower)),
            (Decimal(high) - Decimal(low), Decimal(low)),
        )
    except TypeError:
        return 0

    # each spread as a percentage of its lower bound
    b_pcnt, k_pcnt, p_pcnt = (abs(spread / base) * 100 for spread, base in spreads)
    # the two 20-period channels averaged, then averaged with the price range
    chan_20_pcnt = (b_pcnt + k_pcnt) / 2
    return abs((chan_20_pcnt + p_pcnt) / 2)
def chunker(market_list, chunk_size):
    """Yield successive lists of at most chunk_size items from market_list."""
    it = iter(market_list)
    while True:
        chunk = list(islice(it, chunk_size))
        if not chunk:
            return
        yield chunk
The provided code snippet includes necessary dependencies for implementing the `process_screener_data` function. Write a Python function `def process_screener_data(app, markets, quote_currency, exchange_name)` to solve the following problem:
Hit TradingView up for the goods so we don't waste unnecessary time/compute resources (brandon's top picks)
Here is the function:
def process_screener_data(app, markets, quote_currency, exchange_name):
    """
    Hit TradingView up for the goods so we don't waste unnecessary time/compute resources (brandon's top picks)

    Pulls TradingView analysis for every market (in batches of 100), scores
    each pair against the app's configured thresholds, prints the qualifying
    pairs and saves them for the telegram scanner.  Always returns True.
    """
    # BUGFIX: re.IGNORECASE was previously passed as re.sub's positional
    # `count` argument (limiting replacements to 2, not ignoring case);
    # it must be given as `flags=`
    ta_screener_list = [f"{re.sub('PRO', '', app.exchange.name, flags=re.IGNORECASE)}:{re.sub('-', '', market)}" for market in markets]
    screener_staging = [p for p in chunker(ta_screener_list, 100)]
    screener_analysis = []
    additional_indicators = ["ATR", "KltChnl.upper", "KltChnl.lower"]
    # TradingView.indicators.append("Volatility.D")
    for pair_list in screener_staging:
        screener_analysis.extend(
            [
                a
                for a in get_multiple_analysis(  # noqa: F405
                    screener="crypto", interval=app.granularity.short, symbols=pair_list, additional_indicators=additional_indicators
                ).values()
            ]
        )

    # Take what we need and do magic, ditch the rest.
    formatted_ta = []
    for ta in screener_analysis:
        try:
            if app.debug:
                print(f"Checking {ta.symbol} on {exchange_name}\n")
            recommend = Decimal(ta.indicators.get("Recommend.All"))
            volatility = Decimal(
                volatility_calculator(
                    ta.indicators["BB.upper"],
                    ta.indicators["BB.lower"],
                    ta.indicators["KltChnl.upper"],
                    ta.indicators["KltChnl.lower"],
                    ta.indicators["high"],
                    ta.indicators["low"],
                )
            )
            # volatility = Decimal(ta.indicators['Volatility.D']) * 100
            adx = abs(Decimal(ta.indicators["ADX"]))
            adx_posi_di = Decimal(ta.indicators["ADX+DI"])
            adx_neg_di = Decimal(ta.indicators["ADX-DI"])
            high = Decimal(ta.indicators["high"]).quantize(Decimal("1e-{}".format(8)))  # noqa: F841
            low = Decimal(ta.indicators["low"]).quantize(Decimal("1e-{}".format(8)))  # noqa: F841
            close = Decimal(ta.indicators["close"]).quantize(Decimal("1e-{}".format(8)))
            # ATR normalised
            atr = (Decimal(ta.indicators["ATR"]) / close * 100).quantize(Decimal("1e-{}".format(2))) if "ATR" in ta.indicators else 0
            volume = Decimal(ta.indicators["volume"])
            macd = Decimal(ta.indicators["MACD.macd"])
            macd_signal = Decimal(ta.indicators["MACD.signal"])
            bollinger_upper = Decimal(ta.indicators["BB.upper"])
            bollinger_lower = Decimal(ta.indicators["BB.lower"])
            kelt_upper = Decimal(ta.indicators["KltChnl.upper"])  # noqa: F841
            kelt_lower = Decimal(ta.indicators["KltChnl.lower"])  # noqa: F841
            rsi = Decimal(ta.indicators.get("RSI", 0))
            stoch_d = Decimal(ta.indicators.get("Stoch.D", 0))
            stoch_k = Decimal(ta.indicators.get("Stoch.K", 0))
            williams_r = Decimal(ta.indicators.get("W.R", 0))
            score = 0
            analysis_summary = ta.summary  # noqa: F841
            rating = ta.summary["RECOMMENDATION"]

            # the overall TradingView recommendation seeds the score
            if rating == "SELL":
                score -= 2.5
            elif rating == "STRONG_SELL":
                score -= 5
            elif rating == "NEUTRAL":
                score += 0
            elif rating == "BUY":
                score += 2.5
            elif rating == "STRONG_BUY":
                score += 5

            # indicator bonuses
            if (adx >= app.adx_threshold) and (adx_posi_di > adx_neg_di) and (adx_posi_di > adx):
                if app.debug:
                    print(f"ADX({adx}) >= {app.adx_threshold}")
                score += 1
            if volume >= app.volume_threshold:
                if app.debug:
                    print(f"Volume({volume}) >= {app.volume_threshold}")
                score += 1
            if abs(macd) > abs(macd_signal):
                if app.debug:
                    print(f"MACD({macd}) above signal({macd_signal})")
                score += 1
            if volatility >= app.volatility_threshold:
                if app.debug:
                    # BUGFIX: this message was missing its closing parenthesis
                    print(f"Volatility({volatility}) is above {app.volatility_threshold}")
                score += 1
            # hard disqualifiers
            if volatility < app.minimum_volatility:
                if app.debug:
                    print(f"{ta.symbol} ({volatility}) is below min volatility of {app.minimum_volatility}")
                score -= 100
            if volume < app.minimum_volume:
                if app.debug:
                    # BUGFIX: referenced non-existent app.volume, which raised
                    # AttributeError in debug mode and silently skipped the pair
                    print(f"{ta.symbol} ({volume}) is below min volume of {app.minimum_volume}")
                score -= 100
            if close < app.minimum_quote_price:
                if app.debug:
                    # BUGFIX: message read "min quote self.price"
                    print(f"{ta.symbol} ({close}) is below min quote price of {app.minimum_quote_price}")
                score -= 100
            # oversold / momentum bonuses
            if 30 >= rsi > 20:
                score += 1
            if 20 < stoch_d <= 30:
                score += 1
            if stoch_k > stoch_d:
                score += 1
            if williams_r <= -30:
                score += 1

            if (score >= app.selection_score) and (rating in app.tv_screener_ratings):
                relevant_ta = {}
                if app.exchange == CryptoExchange.COINBASE or app.exchange == CryptoExchange.COINBASEPRO or app.exchange == CryptoExchange.KUCOIN:
                    # re-insert the "-" separator that TradingView symbols omit
                    relevant_ta["market"] = re.sub(rf"(.*){quote_currency}", rf"\1-{quote_currency}", ta.symbol)
                else:
                    relevant_ta["market"] = ta.symbol
                relevant_ta["recommend"] = recommend
                relevant_ta["volume"] = volume
                relevant_ta["volatility"] = volatility
                relevant_ta["adx"] = adx
                relevant_ta["adx+di"] = adx_posi_di
                relevant_ta["adx-di"] = adx_neg_di
                relevant_ta["macd"] = macd
                relevant_ta["macd.signal"] = macd_signal
                relevant_ta["bollinger_upper"] = bollinger_upper
                relevant_ta["bollinger_lower"] = bollinger_lower
                relevant_ta["rsi"] = rsi
                relevant_ta["stoch_d"] = stoch_d
                relevant_ta["stoch_k"] = stoch_k
                relevant_ta["williamsr"] = williams_r
                relevant_ta["rating"] = rating
                relevant_ta["score"] = score
                # Hack a percentage from the recommendation which would take into account all the indicators rather than just ATR
                if atr > 0:
                    relevant_ta["atr72_pcnt"] = atr
                else:
                    relevant_ta["atr72_pcnt"] = 0
                try:
                    relevant_ta["buy_next"] = "SEND IT!" if re.search("BUY", rating) else False
                except AttributeError:
                    relevant_ta["buy_next"] = False
                formatted_ta.append(relevant_ta)
        except Exception:
            # best-effort: a malformed analysis result just skips that pair
            pass

    if formatted_ta:
        # Stick it in a DF for the bots
        df_markets = pd.DataFrame(formatted_ta)
        df_markets = df_markets[
            [
                "market",
                "score",
                "recommend",
                "volume",
                "volatility",
                "adx",
                "adx+di",
                "adx-di",
                "macd",
                "macd.signal",
                "bollinger_upper",
                "bollinger_lower",
                "rsi",
                "stoch_d",
                "stoch_k",
                "williamsr",
                "rating",
                "buy_next",
                "atr72_pcnt",
            ]
        ]
        # (a former df_markets.columns reassignment to the identical names was a no-op and has been removed)
        df_markets["score"] = df_markets["score"].astype(float).round(0).astype(int)
        df_markets["recommend"] = df_markets["recommend"].astype(float)
        df_markets["volume"] = df_markets["volume"].astype(float).round(0).astype(int)
        df_markets["volatility"] = df_markets["volatility"].astype(float)
        df_markets["adx"] = df_markets["adx"].astype(float)
        df_markets["adx+di"] = df_markets["adx+di"].astype(float)
        df_markets["adx-di"] = df_markets["adx-di"].astype(float)
        df_markets["macd"] = df_markets["macd"].astype(float)
        df_markets["macd.signal"] = df_markets["macd.signal"].astype(float)
        df_markets["bollinger_upper"] = df_markets["bollinger_upper"].astype(float)
        df_markets["bollinger_lower"] = df_markets["bollinger_lower"].astype(float)
        df_markets["rsi"] = df_markets["rsi"].astype(float)
        df_markets["stoch_d"] = df_markets["stoch_d"].astype(float)
        df_markets["stoch_k"] = df_markets["stoch_k"].astype(float)
        df_markets["williamsr"] = df_markets["williamsr"].astype(float)
        df_markets["atr72_pcnt"] = df_markets["atr72_pcnt"].astype(float)
        df_markets.sort_values(by=["market"], ascending=True, inplace=True)
        df_markets.set_index("market", inplace=True)
        print(df_markets.sort_values(by=["buy_next", "atr72_pcnt"], ascending=[False, False], inplace=False))
        TGBot(app, scanner=True).save_scanner_output(app.exchange.value, quote_currency, df_markets)
    else:
        # still write a blank result so the scanner output gets refreshed
        blank_data = {"buy_next": False, "atr72_pcnt": 0, "volume": 0}
        df_markets = pd.DataFrame([blank_data])
        TGBot(app, scanner=True).save_scanner_output(app.exchange.value, quote_currency, df_markets)
        print("No pairs found!")
    return True
168,154 | from stat import UF_APPEND
from views.PyCryptoBot import RichText
class RichText:
    """Rendering helpers for PyCryptoBot's rich-based console output.

    Every method is stateless and is invoked directly on the class
    (e.g. ``RichText.notify(...)``), so they are declared ``@staticmethod``.
    Most helpers build a styled ``rich.text.Text`` fragment and return
    ``None`` when the fragment should be omitted from the output row.
    """

    @staticmethod
    def notify(_notification, app: object = None, level: str = "normal") -> None:
        """Print a timestamped, colour-coded log row to the terminal.

        Non-string notifications and empty strings are silently ignored.

        Raises:
            TypeError: if ``app`` is None.
            ValueError: if ``level`` is not a recognised log level.
        """
        # only string payloads are rendered; anything else falls through
        # to the empty-string early return below
        notification = ""
        if isinstance(_notification, str):
            notification = str(_notification)

        if app is None:
            raise TypeError("app is None")

        if notification == "":
            return

        # map each log level to a rich style (replaces the if/elif chain
        # with identical results; membership doubles as validation)
        level_colors = {
            "emergency": "bright_red blink",
            "alert": "bright_red",
            "critical": "red3 blink",
            "error": "red3",
            "warning": "dark_orange",
            "notice": "magenta",
            "info": "white",
            "debug": "dark_orange",
            "normal": "orange_red1",
        }
        if level not in level_colors:
            raise ValueError(f"RichText log level, '{level}' is not valid!")
        color = level_colors[level]

        # one borderless, headerless row: bot id, timestamp, market,
        # granularity, then the message itself
        table_console = Table(title=None, box=None, show_header=False, show_footer=False)
        table_console.add_row(
            RichText.styled_text("Bot1", "magenta"),
            RichText.styled_text(datetime.today().strftime("%Y-%m-%d %H:%M:%S"), "white"),
            RichText.styled_text(app.market, "yellow"),
            RichText.styled_text(app.print_granularity(), "yellow"),
            RichText.styled_text(notification, color),
        )

        console_term = Console(no_color=(not app.term_color), width=app.term_width)
        console_term.print(table_console)

        # mirror the row into the file-backed console unless logging is off
        if app.disablelog is False:
            app.console_log.print(table_console)

    @staticmethod
    def action_text(action: str = "WAIT") -> Text:
        """Return 'Action: <action>' with the value in cyan, or None for empty input."""
        if action == "":
            return None

        action_msg = f"Action: {action}"
        text = Text(action_msg)
        text.stylize("white", 0, 7)  # "Action:"
        text.stylize("cyan", 8, len(action_msg))
        return text

    @staticmethod
    def last_action_text(action: str = "WAIT") -> Text:
        """Return 'Last Action: <action>' with the value in cyan, or None for empty input."""
        if action == "":
            return None

        action_msg = f"Last Action: {action}"
        text = Text(action_msg)
        text.stylize("white", 0, 12)  # "Last Action:"
        text.stylize("cyan", 13, len(action_msg))
        return text

    @staticmethod
    def styled_text(input: str = "", color: str = "white", disabled: bool = False) -> Text:
        """Return *input* styled in a single colour, or None when disabled/empty."""
        if disabled or input == "":
            return None

        text = Text(input)
        text.stylize(color, 0, len(input))
        return text

    @staticmethod
    def styled_label_text(label: str = "", label_color: str = "white", input: str = "", input_color: str = "cyan", disabled: bool = False) -> Text:
        """Return '<label>: <input>' with independently coloured parts, or None when disabled/empty."""
        if disabled or input == "":
            return None

        label_text_msg = f"{label}: {input}"
        text = Text(label_text_msg)
        text.stylize(label_color, 0, len(label))
        text.stylize(input_color, len(label) + 1, len(label_text_msg))
        return text

    @staticmethod
    def margin_text(
        margin_text: str = "",
        last_action: str = "WAIT",
    ) -> Text:
        """Return 'Margin: <pct>' coloured by sign; only shown while holding (last_action == "BUY")."""
        if margin_text == "" or last_action != "BUY":
            return None

        margin_msg = f"Margin: {margin_text}"
        text = Text(margin_msg)
        if margin_text == "0%":
            text.stylize("white", 0, len(margin_msg))
        elif margin_text.startswith("-"):
            # fixed offsets: "Margin" is 6 chars (was 0,5 / 7, copy-pasted
            # from delta_text where "Delta" is 5 chars)
            text.stylize("white", 0, 6)
            text.stylize("red", 8, len(margin_msg))
        else:
            text.stylize("white", 0, 6)
            text.stylize("green", 8, len(margin_msg))
        return text

    @staticmethod
    def delta_text(
        price: float = 0.0,
        last_buy_price: float = 0.0,
        precision: int = 2,
        last_action: str = "WAIT",
    ) -> Text:
        """Return 'Delta: <price - last_buy_price>' coloured by sign; only shown while holding."""
        if price == 0.0 or last_buy_price == 0.0 or last_action != "BUY":
            return None

        delta_msg = f"Delta: {str(round(price - last_buy_price, precision))}"
        text = Text(delta_msg)
        if delta_msg.startswith("Delta: -"):
            text.stylize("white", 0, 5)
            text.stylize("red", 7, len(delta_msg))
        else:
            text.stylize("white", 0, 5)
            text.stylize("green", 7, len(delta_msg))
        return text

    @staticmethod
    def bull_bear(golden_cross: bool = False, adjusttotalperiods: int = 300) -> Text:
        """Return a green 'BULL' or red 'BEAR' tag; None when too few periods to judge."""
        if adjusttotalperiods < 200:
            return None

        if golden_cross:
            text = Text("BULL")
            text.stylize("green", 0, 4)
        else:
            text = Text("BEAR")
            text.stylize("red", 0, 4)
        return text

    @staticmethod
    def elder_ray(elder_ray_buy: bool = False, elder_ray_sell: bool = False, disabled: bool = False) -> Text:
        """Return 'Elder-Ray: buy/sell' (green/red), or None when disabled or neither signal fires."""
        if disabled:
            return None

        if elder_ray_buy:
            text = Text("Elder-Ray: buy")
            text.stylize("white", 0, 10)
            text.stylize("green", 11, 14)
        elif elder_ray_sell:
            text = Text("Elder-Ray: sell")
            text.stylize("white", 0, 10)
            text.stylize("red", 11, 15)
        else:
            return None
        return text

    @staticmethod
    def on_balance_volume(obv: float = 0.0, obv_pc: int = 0, disabled: bool = False) -> Text:
        """Return 'OBV: <value> (<pc>%)' coloured green/red by sign, or None when disabled."""
        if disabled:
            return None

        # build the message once (the original duplicated the f-string in
        # the negative branch)
        obv_msg = f"OBV: {obv:.2f} ({obv_pc}%)"
        text = Text(obv_msg)
        text.stylize("white", 0, 4)
        text.stylize("green" if obv >= 0 else "red", 5, len(obv_msg))
        return text

    @staticmethod
    def number_comparison(label: str = "", value1: float = 0.0, value2: float = 0.0, highlight: bool = False, disabled: bool = False) -> Text:
        """Return '<label> <value1> <op> <value2>' coloured by the comparison outcome."""
        if disabled:
            return None

        color = "white"
        operator = "="
        if value1 > value2:
            if highlight:
                color = "white on green"
            else:
                color = "green"
            operator = ">"
        elif value1 < value2:
            if highlight:
                color = "white on red"
            else:
                color = "red"
            operator = "<"
        text = Text(f"{label} {value1} {operator} {value2}")
        text.stylize("white", 0, len(label))
        text.stylize(color, len(label) + 1, len(text))
        return text
The provided code snippet includes necessary dependencies for implementing the `calculate_margin` function. Write a Python function `def calculate_margin( buy_size: float = 0.0, buy_filled: int = 0.0, buy_price: int = 0.0, buy_fee: float = 0.0, sell_percent: float = 100, sell_price: float = 0.0, sell_fee: float = 0.0, sell_taker_fee: float = 0.0, app: object = None, ) -> float` to solve the following problem:
Calculate the margin for a given trade.
Here is the function:
def calculate_margin(
    buy_size: float = 0.0,
    buy_filled: float = 0.0,
    buy_price: float = 0.0,
    buy_fee: float = 0.0,
    sell_percent: float = 100,
    sell_price: float = 0.0,
    sell_fee: float = 0.0,
    sell_taker_fee: float = 0.0,
    app: object = None,
) -> tuple:
    """
    Calculate the margin for a given trade.

    Parameters:
        buy_size: quote-currency amount spent on the buy (before fees)
        buy_filled: base-currency amount received (after fees)
        buy_price: quote-currency price at buy time
        buy_fee: quote-currency fee paid on the buy
        sell_percent: percentage of the position being sold (0-100)
        sell_price: quote-currency price at sell time
        sell_fee: actual sell fee in quote currency (0.0 means derive it
            from sell_taker_fee)
        sell_taker_fee: taker fee *rate* used when sell_fee is not supplied
        app: optional bot instance, used only for debug/error notifications

    Returns:
        (margin, profit, sell_fee) — margin as a percentage of buy_size,
        profit and sell_fee in quote currency. Returns (0, 0, 0) when
        buy_size is 0 (after logging an error) to avoid a divide-by-zero.
    """
    PRECISION = 8

    if app is not None and app.debug:
        RichText.notify(f"buy_size: {buy_size}", app, "debug")  # buy_size is quote currency (before fees)
        RichText.notify(f"buy_filled: {buy_filled}", app, "debug")  # buy_filled is base currency (after fees)
        RichText.notify(f"buy_price: {buy_price}", app, "debug")  # buy_price is quote currency
        RichText.notify(f"buy_fee: {buy_fee}", app, "debug")  # buy_fee is quote currency

    # guard against a divide-by-zero before doing any work
    if buy_size == 0.0:
        RichText.notify("buy_size is 0.0 and would result in a divide by 0 error", app, "error")
        return 0, 0, 0

    # sell_size is quote currency (before fees) - sell_price * buy_filled
    sell_size = round((sell_percent / 100) * (sell_price * buy_filled), PRECISION)

    # derive the sell fee from the taker rate when the actual fee is unknown
    if sell_fee == 0.0 and sell_taker_fee > 0.0:
        sell_fee = round((sell_size * sell_taker_fee), PRECISION)

    # quote currency received after fees
    sell_filled = round(sell_size - sell_fee, PRECISION)

    # profit is the difference between buy_size without fees and sell_filled with fees
    profit = round(sell_filled - buy_size, PRECISION)

    # margin as a percentage of the original buy size (buy_size != 0 here)
    margin = round((profit / buy_size) * 100, PRECISION)

    if app is not None and app.debug:
        RichText.notify(f"sell_size: {sell_size}", app, "debug")  # sell_size is quote currency (before fees)
        RichText.notify(f"sell_filled: {sell_filled}", app, "debug")  # sell_filled is quote currency (after fees)
        RichText.notify(f"sell_price: {sell_price}", app, "debug")  # sell_price is quote currency
        RichText.notify(f"sell_fee: {sell_fee}", app, "debug")  # sell_fee is quote currency
        RichText.notify(f"profit: {profit}", app, "debug")
        RichText.notify(f"margin: {margin}", app, "debug")

    return margin, profit, sell_fee
168,155 | import ast
import json
import os.path
import re
import sys
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
from models.exchange.Granularity import Granularity
def parse_market(market):
def merge_config_and_args(exchange_config, args):
def is_currency_valid(currency):
def default_config_parse(app, config):
class Granularity(Enum):
def __init__(self, integer, short, medium, frequency):
def convert_to_enum(value):
def to_short(self):
def to_integer(self):
def to_medium(self):
def get_frequency(self):
def parser(app, coinbase_config, args=None):
    """Parse and validate the Coinbase Pro section of the configuration.

    Performs a one-off migration of inline API credentials into a
    'coinbasepro.key' file, loads credentials from ``app.api_key_file``
    when configured, validates key/secret/passphrase/URL formats, and
    finally applies the generic, currency and granularity options to *app*.

    Raises:
        Exception: if *app* is not supplied.
        TypeError / ValueError: when a credential or the API URL fails validation.
        RuntimeError: when the key file exists but cannot be read.
    """
    # avoid a mutable default argument; None behaves like the old {}
    if args is None:
        args = {}

    if not app:
        raise Exception("No app is passed")

    if isinstance(coinbase_config, dict):
        if "api_key" in coinbase_config or "api_secret" in coinbase_config or "api_passphrase" in coinbase_config:
            print(">>> migrating api keys to coinbasepro.key <<<\n")

            # persist the inline credentials to 'coinbasepro.key'
            with open("coinbasepro.key", "w") as fh:
                fh.write(f"{coinbase_config['api_key']}\n{coinbase_config['api_secret']}\n{coinbase_config['api_passphrase']}")

            if os.path.isfile("config.json") and os.path.isfile("coinbasepro.key"):
                # point the config at the key file and drop the inline credentials
                coinbase_config["api_key_file"] = "coinbasepro.key"
                del coinbase_config["api_key"]
                del coinbase_config["api_secret"]
                del coinbase_config["api_passphrase"]

                # rewrite the 'coinbasepro' element of config.json
                with open("config.json", "r") as fh:
                    config_json = ast.literal_eval(fh.read())
                config_json["coinbasepro"] = coinbase_config
                with open("config.json", "w") as fh:
                    fh.write(json.dumps(config_json, indent=4))

                app.api_key_file = "coinbasepro.key"

        # command-line argument wins over the config file entry
        if "api_key_file" in args and args["api_key_file"] is not None:
            app.api_key_file = args["api_key_file"]
        elif "api_key_file" in coinbase_config:
            app.api_key_file = coinbase_config["api_key_file"]

        if app.api_key_file is not None:
            if not os.path.isfile(app.api_key_file):
                try:
                    raise Exception(f"Unable to read {app.api_key_file}, please check the file exists and is readable. Remove \"api_key_file\" key from the config file for test mode!\n")
                except Exception as e:
                    print(f"{type(e).__name__}: {e}")
                    sys.exit(1)
            else:
                try:
                    # key file layout: key, secret, passphrase — one per line
                    with open(app.api_key_file, "r") as f:
                        key = f.readline().strip()
                        secret = f.readline().strip()
                        password = f.readline().strip()
                    coinbase_config["api_key"] = key
                    coinbase_config["api_secret"] = secret
                    coinbase_config["api_passphrase"] = password
                except Exception:
                    raise RuntimeError(f"Unable to read {app.api_key_file}")

        if "api_key" in coinbase_config and "api_secret" in coinbase_config and "api_passphrase" in coinbase_config and "api_url" in coinbase_config:
            # validates the api key is syntactically correct
            p = re.compile(r"^[a-f0-9]{32}$")
            if not p.match(coinbase_config["api_key"]):
                raise TypeError("Coinbase Pro API key is invalid")
            app.api_key = coinbase_config["api_key"]

            # validates the api secret is syntactically correct
            p = re.compile(r"^[A-z0-9+\/]+==$")
            if not p.match(coinbase_config["api_secret"]):
                raise TypeError("Coinbase Pro API secret is invalid")
            app.api_secret = coinbase_config["api_secret"]

            # validates the api passphrase is syntactically correct
            p = re.compile(r"^[A-z0-9#$%=@!{},`~&*()<>?.:;_|^/+\[\]]{8,32}$")
            if not p.match(coinbase_config["api_passphrase"]):
                raise TypeError("Coinbase Pro API passphrase is invalid")
            app.api_passphrase = coinbase_config["api_passphrase"]

            valid_urls = [
                "https://api.exchange.coinbase.com",
                "https://api.exchange.coinbase.com/",
                "https://public.sandbox.pro.coinbase.com",
                "https://public.sandbox.pro.coinbase.com/",
            ]

            # validate Coinbase Pro API
            if coinbase_config["api_url"] not in valid_urls:
                raise ValueError("Coinbase Pro API URL is invalid")

            app.api_url = coinbase_config["api_url"]
    else:
        coinbase_config = {}

    config = merge_config_and_args(coinbase_config, args)
    default_config_parse(app, config)

    if "base_currency" in config and config["base_currency"] is not None:
        if not is_currency_valid(config["base_currency"]):
            raise TypeError("Base currency is invalid.")
        app.base_currency = config["base_currency"]

    if "quote_currency" in config and config["quote_currency"] is not None:
        if not is_currency_valid(config["quote_currency"]):
            raise TypeError("Quote currency is invalid.")
        app.quote_currency = config["quote_currency"]

    if "market" in config and config["market"] is not None:
        app.market, app.base_currency, app.quote_currency = parse_market(config["market"])

    if app.base_currency != "" and app.quote_currency != "":
        app.market = app.base_currency + "-" + app.quote_currency

    if "granularity" in config and config["granularity"] is not None:
        if isinstance(config["granularity"], str) and config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))
        elif isinstance(config["granularity"], int):
            app.granularity = Granularity.convert_to_enum(config["granularity"])
168,156 | import re
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
from models.helper.LogHelper import Logger
def parser(app, logger_config):
    """Apply the "logger" section of the configuration to *app*.

    Validates the file/console logging switches and log levels,
    raising TypeError on malformed values.
    """
    if not logger_config:
        raise Exception("There is an error in your config dictionary")
    if not app:
        raise Exception("No app is passed")

    valid_levels = ("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET")

    if "filelog" in logger_config:
        filelog = logger_config["filelog"]
        if not isinstance(filelog, int):
            raise TypeError("filelog must be type of int")
        if filelog in (0, 1):
            app.filelog = filelog

    if app.filelog:
        if "logfile" in logger_config:
            logfile = logger_config["logfile"]
            if not isinstance(logfile, str):
                raise TypeError("logfile must be type of str")
            # only override the built-in default, never a path already set
            if app.logfile == "pycryptobot.log":
                app.logfile = logfile

        if "fileloglevel" in logger_config:
            fileloglevel = logger_config["fileloglevel"]
            if not isinstance(fileloglevel, str):
                raise TypeError("fileloglevel must be type of str")
            if fileloglevel not in valid_levels:
                raise TypeError('fileloglevel must be one of: "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"')
            app.fileloglevel = fileloglevel

    if "consolelog" in logger_config:
        consolelog = logger_config["consolelog"]
        if not isinstance(consolelog, int):
            raise TypeError("consolelog must be type of int")
        if consolelog in (0, 1):
            app.consolelog = consolelog

    if app.consolelog:
        if "consoleloglevel" in logger_config:
            consoleloglevel = logger_config["consoleloglevel"]
            if not isinstance(consoleloglevel, str):
                raise TypeError("consoleloglevel must be type of str")
            if consoleloglevel not in valid_levels:
                raise TypeError('consoleloglevel must be one of: "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"')
            app.consoleloglevel = consoleloglevel
168,157 | import re
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
def parse_market(market):
    """Validate a dummy-exchange market string and return (market, base, quote)."""
    # reject anything is_market_valid does not accept
    if not is_market_valid(market):
        raise ValueError(f'Dummy market invalid: {market}')
    base, quote = market.split('-', 2)
    return market, base, quote
def merge_config_and_args(exchange_config, args):
    """Overlay command-line *args* on top of the exchange "config" section.

    Entries whose value is None or False are treated as "not supplied"
    and do not override the config file.
    """
    section = exchange_config.get("config")
    merged = dict(section) if section is not None else {}
    merged.update({key: value for key, value in args.items() if value is not None and value is not False})
    return merged
def is_currency_valid(currency):
    """Return a truthy match when *currency* is 1-20 uppercase alphanumerics, else None."""
    return re.match(r"^[0-9A-Z]{1,20}$", currency)
def default_config_parse(app, config):
    """
    Requirements for bot options:
    - Update _generate_banner() in controllers/PyCryptoBot.py
    - Update the command line arguments below
    - Update the config parser in models/config/default_parser.py
    """
    # Each nested helper validates one config entry and stores it on *app*
    # via setattr. They return False only when called without the required
    # option_name/store_name arguments, True otherwise.
    # Validate an int option, optionally bounds-checked to [value_min, value_max].
    def config_option_int(option_name: str = None, option_default: int = 0, store_name: str = None, value_min: int = None, value_max: int = None) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], int):
                if value_min is not None and value_max is not None:
                    if config[option_name] >= value_min and config[option_name] <= value_max:
                        setattr(app, store_name, int(config[option_name]))
                    else:
                        raise TypeError(f"{option_name} is out of bounds")
                else:
                    setattr(app, store_name, int(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a number")
        else:
            setattr(app, store_name, option_default)  # default
        return True
    # Validate a numeric (int or float) option, stored as float, optionally bounds-checked.
    def config_option_float(
        option_name: str = None, option_default: float = 0.0, store_name: str = None, value_min: float = None, value_max: float = None
    ) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], int) or isinstance(config[option_name], float):
                if value_min is not None and value_max is not None:
                    if config[option_name] >= value_min and config[option_name] <= value_max:
                        setattr(app, store_name, float(config[option_name]))
                    else:
                        raise TypeError(f"{option_name} is out of bounds")
                else:
                    setattr(app, store_name, float(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a number")
        else:
            setattr(app, store_name, option_default)  # default
        return True
    # Validate a 0/1 flag. When store_invert is True the *negation* is stored
    # (used for "enable X" options that populate a "disableX" attribute).
    def config_option_bool(option_name: str = None, option_default: bool = True, store_name: str = None, store_invert: bool = False) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
            store_invert = False  # legacy config does not need to be inverted
        if option_name in config:
            if isinstance(config[option_name], int):
                # NOTE(review): an int outside [0, 1] is silently ignored here
                # (no attribute set, no error) — confirm this is intentional.
                if config[option_name] in [0, 1]:
                    if store_invert is True:
                        setattr(app, store_name, bool(not config[option_name]))
                    else:
                        setattr(app, store_name, bool(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be of type int (0 or 1)")
        else:
            if store_invert is True:
                setattr(app, store_name, (not option_default))  # default (if inverted - disabled)
            else:
                setattr(app, store_name, option_default)  # default
        return True
    # NOTE(review): stray function-local import; sys does not appear to be
    # used below — confirm before removing.
    import sys
    # Validate a list-valued option.
    def config_option_list(option_name: str = None, option_default: str = "", store_name: str = None) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], list):
                setattr(app, store_name, config[option_name])
            else:
                raise TypeError(f"{option_name} must be a list")
        else:
            setattr(app, store_name, option_default)  # default
        return True
    # Validate a string option against a closed set; optionally zero out a
    # companion attribute (e.g. enabling "sim" clears "is_live").
    def config_option_str(option_name: str = None, option_default: str = "", store_name: str = None, valid_options: list = [], disable_variable=None) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], str):
                if config[option_name] in valid_options:
                    setattr(app, store_name, config[option_name])
                    if disable_variable is not None:
                        setattr(app, disable_variable, 0)
                else:
                    raise TypeError(f"{option_name} is not a valid option")
            else:
                raise TypeError(f"{option_name} must be a string")
        else:
            setattr(app, store_name, option_default)  # default
        return True
    # Validate a date string against date_format; "now" resolves to today's
    # date when allow_now is True.
    def config_option_date(
        option_name: str = None, option_default: str = "", store_name: str = None, date_format: str = "%Y-%m-%d", allow_now: bool = False
    ) -> bool:
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], str):
                if allow_now is True and config[option_name] == "now":
                    setattr(app, store_name, str(datetime.today().strftime("%Y-%m-%d")))
                else:
                    # strptime is used purely for validation; the raw string is stored
                    try:
                        datetime.strptime(config[option_name], date_format)
                    except ValueError:
                        raise ValueError(f"Incorrect data format, should be {date_format}")
                    setattr(app, store_name, str(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a date: {date_format}")
        else:
            setattr(app, store_name, option_default)  # default
        return True
    # bespoke options with non-standard logic
    if "market" in config and config["market"] is not None:
        if app.exchange == Exchange.BINANCE:
            p = re.compile(r"^[0-9A-Z]{4,25}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default market for Binance
                app.market = "BTCGBP"
        else:
            if app.exchange != Exchange.COINBASE and app.exchange != Exchange.COINBASEPRO and app.exchange != Exchange.KUCOIN:
                # default if no exchange set
                app.exchange = Exchange.COINBASEPRO
            # binance and kucoin
            p = re.compile(r"^[0-9A-Z]{1,20}\-[1-9A-Z]{2,5}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default for coinbase pro and binance
                app.market = "BTC-GBP"
    # an explicit granularity disables smart switching
    if "granularity" in config and config["granularity"] is not None:
        app.smart_switch = 0
        if isinstance(config["granularity"], str) and not config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(config["granularity"])
        else:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))
    # standard options
    try:
        term_width = get_terminal_size().columns
    except OSError:
        term_width = 180
    config_option_bool(option_name="debug", option_default=False, store_name="debug", store_invert=False)
    config_option_bool(option_name="termcolor", option_default=True, store_name="term_color", store_invert=False)
    config_option_int(option_name="termwidth", option_default=term_width, store_name="term_width", value_min=60, value_max=420)
    config_option_int(option_name="logwidth", option_default=180, store_name="log_width", value_min=60, value_max=420)
    config_option_bool(option_name="live", option_default=False, store_name="is_live", store_invert=False)
    config_option_bool(option_name="graphs", option_default=False, store_name="save_graphs", store_invert=False)
    # NOTE(review): option_default=0 (an int) for a string option — confirm
    # downstream code treats is_sim == 0 as "not simulating".
    config_option_str(
        option_name="sim", option_default=0, store_name="is_sim", valid_options=["slow", "fast", "slow-sample", "fast-sample"], disable_variable="is_live"
    )
    config_option_date(option_name="simstartdate", option_default=None, store_name="simstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_date(option_name="simenddate", option_default=None, store_name="simenddate", date_format="%Y-%m-%d", allow_now=True)
    config_option_bool(option_name="simresultonly", option_default=False, store_name="simresultonly", store_invert=False)
    config_option_bool(option_name="telegram", option_default=False, store_name="disabletelegram", store_invert=True)
    config_option_bool(option_name="telegrambotcontrol", option_default=False, store_name="telegrambotcontrol", store_invert=False)
    config_option_bool(option_name="telegramtradesonly", option_default=False, store_name="telegramtradesonly", store_invert=False)
    config_option_bool(option_name="telegramerrormsgs", option_default=False, store_name="disabletelegramerrormsgs", store_invert=True)
    config_option_bool(option_name="stats", option_default=False, store_name="stats", store_invert=False)
    config_option_list(option_name="statgroup", option_default="", store_name="statgroup")
    config_option_date(option_name="statstartdate", option_default=None, store_name="statstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_bool(option_name="statdetail", option_default=False, store_name="statdetail", store_invert=False)
    config_option_bool(option_name="log", option_default=True, store_name="disablelog", store_invert=True)
    config_option_bool(option_name="smartswitch", option_default=False, store_name="smart_switch", store_invert=False)
    config_option_bool(option_name="tradetracker", option_default=False, store_name="disabletracker", store_invert=True)
    config_option_bool(option_name="autorestart", option_default=False, store_name="autorestart", store_invert=False)
    config_option_bool(option_name="websocket", option_default=False, store_name="websocket", store_invert=False)
    config_option_bool(option_name="insufficientfundslogging", option_default=False, store_name="enableinsufficientfundslogging", store_invert=False)
    config_option_bool(option_name="logbuysellinjson", option_default=False, store_name="logbuysellinjson", store_invert=False)
    config_option_bool(option_name="manualtradesonly", option_default=False, store_name="manual_trades_only", store_invert=False)
    config_option_str(option_name="startmethod", option_default="standard", store_name="startmethod", valid_options=["scanner", "standard", "telegram"])
    config_option_int(option_name="recvwindow", option_default=5000, store_name="recv_window", value_min=5000, value_max=60000)
    config_option_str(option_name="lastaction", option_default=None, store_name="last_action", valid_options=["BUY", "SELL"])
    config_option_bool(option_name="kucoincache", option_default=False, store_name="usekucoincache", store_invert=False)
    config_option_bool(option_name="exitaftersell", option_default=False, store_name="exitaftersell", store_invert=False)
    config_option_int(option_name="adjusttotalperiods", option_default=300, store_name="adjusttotalperiods", value_min=200, value_max=500)
    config_option_float(option_name="buypercent", option_default=100, store_name="buypercent", value_min=0, value_max=100)
    config_option_float(option_name="sellpercent", option_default=100, store_name="sellpercent", value_min=0, value_max=100)
    config_option_float(option_name="sellupperpcnt", option_default=None, store_name="sell_upper_pcnt", value_min=0, value_max=100)
    config_option_float(option_name="selllowerpcnt", option_default=None, store_name="sell_lower_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="nosellmaxpcnt", option_default=None, store_name="nosellmaxpcnt", value_min=0, value_max=100)
    config_option_float(option_name="nosellminpcnt", option_default=None, store_name="nosellminpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="preventloss", option_default=False, store_name="preventloss", store_invert=False)
    config_option_float(option_name="preventlosstrigger", option_default=1.0, store_name="preventlosstrigger", value_min=0, value_max=100)
    config_option_float(option_name="preventlossmargin", option_default=0.1, store_name="preventlossmargin", value_min=0, value_max=100)
    config_option_bool(option_name="sellatloss", option_default=True, store_name="sellatloss", store_invert=False)
    config_option_bool(option_name="sellatresistance", option_default=False, store_name="sellatresistance", store_invert=False)
    config_option_bool(option_name="sellatfibonaccilow", option_default=False, store_name="disablefailsafefibonaccilow", store_invert=True)
    config_option_bool(option_name="bullonly", option_default=False, store_name="disablebullonly", store_invert=True)
    config_option_bool(option_name="profitbankreversal", option_default=False, store_name="disableprofitbankreversal", store_invert=True)
    config_option_float(option_name="trailingstoploss", option_default=0.0, store_name="trailing_stop_loss", value_min=-100, value_max=0)
    config_option_float(option_name="trailingstoplosstrigger", option_default=0.0, store_name="trailing_stop_loss_trigger", value_min=0, value_max=100)
    config_option_float(option_name="trailingsellpcnt", option_default=0.0, store_name="trailingsellpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="trailingimmediatesell", option_default=False, store_name="trailingimmediatesell", store_invert=False)
    config_option_float(option_name="trailingsellimmediatepcnt", option_default=0.0, store_name="trailingsellimmediatepcnt", value_min=-100, value_max=0)
    config_option_float(option_name="trailingsellbailoutpcnt", option_default=0.0, store_name="trailingsellbailoutpcnt", value_min=-100, value_max=100)
    config_option_bool(option_name="dynamictsl", option_default=False, store_name="dynamic_tsl", store_invert=False)
    config_option_float(option_name="tslmultiplier", option_default=1.1, store_name="tsl_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tsltriggermultiplier", option_default=1.1, store_name="tsl_trigger_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tslmaxpcnt", option_default=-5.0, store_name="tsl_max_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="buyminsize", option_default=0.0, store_name="buyminsize")
    config_option_float(option_name="buymaxsize", option_default=0.0, store_name="buymaxsize")
    config_option_bool(option_name="buylastsellsize", option_default=False, store_name="buylastsellsize", store_invert=False)
    config_option_bool(option_name="marketmultibuycheck", option_default=False, store_name="marketmultibuycheck", store_invert=False)
    config_option_bool(option_name="buynearhigh", option_default=True, store_name="disablebuynearhigh", store_invert=True)
    config_option_float(option_name="buynearhighpcnt", option_default=3.0, store_name="nobuynearhighpcnt", value_min=0, value_max=100)
    config_option_float(option_name="trailingbuypcnt", option_default=0.0, store_name="trailingbuypcnt", value_min=0, value_max=100)
    config_option_bool(option_name="trailingimmediatebuy", option_default=False, store_name="trailingimmediatebuy", store_invert=False)
    config_option_float(option_name="trailingbuyimmediatepcnt", option_default=0.0, store_name="trailingbuyimmediatepcnt", value_min=0, value_max=100)
    config_option_bool(option_name="selltriggeroverride", option_default=False, store_name="selltriggeroverride", store_invert=False)
    config_option_bool(option_name="ema1226", option_default=True, store_name="disablebuyema", store_invert=True)
    config_option_bool(option_name="macdsignal", option_default=True, store_name="disablebuymacd", store_invert=True)
    config_option_bool(option_name="obv", option_default=False, store_name="disablebuyobv", store_invert=True)
    config_option_bool(option_name="elderray", option_default=False, store_name="disablebuyelderray", store_invert=True)
    config_option_bool(option_name="bbands_s1", option_default=False, store_name="disablebuybbands_s1", store_invert=True)
    config_option_bool(option_name="bbands_s2", option_default=False, store_name="disablebuybbands_s2", store_invert=True)
def parser(app, dummy_config, args={}):
    """Parse the configuration for the dummy (paper-trading) exchange.

    Validates the base/quote currencies, applies the shared default
    options to *app*, and derives the market string locally.
    """
    if not dummy_config:
        raise Exception('There is an error in your config dictionary')
    if not app:
        raise Exception('No app is passed')

    config = merge_config_and_args(dummy_config, args)
    default_config_parse(app, config)

    if config.get('base_currency') is not None:
        if not is_currency_valid(config['base_currency']):
            raise TypeError('Base currency is invalid.')
        base_currency = config['base_currency']

    if config.get('quote_currency') is not None:
        if not is_currency_valid(config['quote_currency']):
            raise TypeError('Quote currency is invalid.')
        quote_currency = config['quote_currency']

    if config.get('market') is not None:
        market, base_currency, quote_currency = parse_market(config['market'])

    if base_currency != '' and quote_currency != '':
        market = f'{base_currency}-{quote_currency}'  # noqa: F841
    else:
        raise Exception('There is an error in your config dictionary')
168,158 | import ast
import json
import os.path
import re
import sys
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
from models.exchange.Granularity import Granularity
def parse_market(market):
    """Validate a Coinbase market string and return (market, base, quote)."""
    # reject anything is_market_valid does not accept
    if not is_market_valid(market):
        raise ValueError(f"Coinbase market invalid: {market}")
    base, quote = market.split("-", 2)
    return market, base, quote
def merge_config_and_args(exchange_config, args):
    """Combine the "config" section with command-line *args*; args win.

    Values of None or False in *args* mean "flag not supplied" and are
    skipped rather than copied over the file configuration.
    """
    merged = {}
    section = exchange_config.get("config")
    if section is not None:
        merged.update(section)
    for key, value in args.items():
        if value is None or value is False:
            continue
        merged[key] = value
    return merged
def is_currency_valid(currency):
    """Return a truthy match when *currency* is 1-20 uppercase alphanumerics, else None."""
    pattern = re.compile(r"^[0-9A-Z]{1,20}$")
    return pattern.match(currency)
def default_config_parse(app, config):
    """
    Parse the exchange-agnostic bot options from ``config`` and store each
    value as an attribute on ``app``.

    Requirements for bot options:
    - Update _generate_banner() in controllers/PyCryptoBot.py
    - Update the command line arguments below
    - Update the config parser in models/config/default_parser.py
    """

    def config_option_int(option_name: str = None, option_default: int = 0, store_name: str = None, value_min: int = None, value_max: int = None) -> bool:
        """Store an integer option on app; bounds-checked when both limits are given."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], int):
            raise TypeError(f"{option_name} must be a number")
        if value_min is not None and value_max is not None and not (value_min <= config[option_name] <= value_max):
            # TypeError (not ValueError) kept for backward compatibility
            raise TypeError(f"{option_name} is out of bounds")
        setattr(app, store_name, int(config[option_name]))
        return True

    def config_option_float(
        option_name: str = None, option_default: float = 0.0, store_name: str = None, value_min: float = None, value_max: float = None
    ) -> bool:
        """Store a numeric option on app as float; bounds-checked when both limits are given."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], (int, float)):
            raise TypeError(f"{option_name} must be a number")
        if value_min is not None and value_max is not None and not (value_min <= config[option_name] <= value_max):
            raise TypeError(f"{option_name} is out of bounds")
        setattr(app, store_name, float(config[option_name]))
        return True

    def config_option_bool(option_name: str = None, option_default: bool = True, store_name: str = None, store_invert: bool = False) -> bool:
        """Store a 0/1 option on app as bool; ``store_invert`` maps enable-flags onto legacy "disable*" attributes."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
            store_invert = False  # legacy config does not need to be inverted
        if option_name in config:
            if isinstance(config[option_name], int):  # NB: bool is a subclass of int
                if config[option_name] in [0, 1]:
                    setattr(app, store_name, bool(not config[option_name]) if store_invert else bool(config[option_name]))
                else:
                    raise TypeError(f"{option_name} must be of type int (0 or 1)")
            # NOTE(review): a non-int value is silently ignored here and the
            # attribute is left unset -- kept as-is for backward compatibility
        else:
            if store_invert is True:
                setattr(app, store_name, (not option_default))  # default (if inverted - disabled)
            else:
                setattr(app, store_name, option_default)  # default
        return True

    def config_option_list(option_name: str = None, option_default: str = "", store_name: str = None) -> bool:
        """Store a list option on app verbatim."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], list):
            raise TypeError(f"{option_name} must be a list")
        setattr(app, store_name, config[option_name])
        return True

    def config_option_str(option_name: str = None, option_default: str = "", store_name: str = None, valid_options=(), disable_variable=None) -> bool:
        """Store a string option on app when it is one of ``valid_options``.

        When the option is present and valid, ``disable_variable`` (if given)
        is reset to 0. ``valid_options`` defaults to an immutable empty tuple
        (was a mutable list default).
        """
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], str):
            raise TypeError(f"{option_name} must be a string")
        if config[option_name] not in valid_options:
            raise TypeError(f"{option_name} is not a valid option")
        setattr(app, store_name, config[option_name])
        if disable_variable is not None:
            setattr(app, disable_variable, 0)
        return True

    def config_option_date(
        option_name: str = None, option_default: str = "", store_name: str = None, date_format: str = "%Y-%m-%d", allow_now: bool = False
    ) -> bool:
        """Store a date-string option on app after validating it against ``date_format``.

        With ``allow_now``, the literal value "now" becomes today's date.
        NOTE(review): relies on a module-level ``datetime`` import -- confirm
        it exists in this module.
        """
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], str):
            raise TypeError(f"{option_name} must be a date: {date_format}")
        if allow_now is True and config[option_name] == "now":
            setattr(app, store_name, str(datetime.today().strftime("%Y-%m-%d")))
            return True
        try:
            datetime.strptime(config[option_name], date_format)
        except ValueError:
            raise ValueError(f"Incorrect data format, should be {date_format}")
        setattr(app, store_name, str(config[option_name]))
        return True

    # bespoke options with non-standard logic
    # NOTE(review): ``Exchange``, ``Granularity``, ``datetime`` and
    # ``get_terminal_size`` are assumed to be imported at module level.
    if "market" in config and config["market"] is not None:
        if app.exchange == Exchange.BINANCE:
            p = re.compile(r"^[0-9A-Z]{4,25}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default market for Binance
                app.market = "BTCGBP"
        else:
            if app.exchange != Exchange.COINBASE and app.exchange != Exchange.COINBASEPRO and app.exchange != Exchange.KUCOIN:
                # default if no exchange set
                app.exchange = Exchange.COINBASEPRO
            # binance and kucoin
            p = re.compile(r"^[0-9A-Z]{1,20}\-[1-9A-Z]{2,5}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default for coinbase pro and binance
                app.market = "BTC-GBP"

    if "granularity" in config and config["granularity"] is not None:
        app.smart_switch = 0  # an explicit granularity disables smart switching
        if isinstance(config["granularity"], str) and not config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(config["granularity"])
        else:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))

    # standard options
    try:
        term_width = get_terminal_size().columns
    except OSError:
        term_width = 180

    config_option_bool(option_name="debug", option_default=False, store_name="debug", store_invert=False)
    config_option_bool(option_name="termcolor", option_default=True, store_name="term_color", store_invert=False)
    config_option_int(option_name="termwidth", option_default=term_width, store_name="term_width", value_min=60, value_max=420)
    config_option_int(option_name="logwidth", option_default=180, store_name="log_width", value_min=60, value_max=420)
    config_option_bool(option_name="live", option_default=False, store_name="is_live", store_invert=False)
    config_option_bool(option_name="graphs", option_default=False, store_name="save_graphs", store_invert=False)
    # option_default=0 (not "") kept for backward compatibility
    config_option_str(
        option_name="sim", option_default=0, store_name="is_sim", valid_options=["slow", "fast", "slow-sample", "fast-sample"], disable_variable="is_live"
    )
    config_option_date(option_name="simstartdate", option_default=None, store_name="simstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_date(option_name="simenddate", option_default=None, store_name="simenddate", date_format="%Y-%m-%d", allow_now=True)
    config_option_bool(option_name="simresultonly", option_default=False, store_name="simresultonly", store_invert=False)
    config_option_bool(option_name="telegram", option_default=False, store_name="disabletelegram", store_invert=True)
    config_option_bool(option_name="telegrambotcontrol", option_default=False, store_name="telegrambotcontrol", store_invert=False)
    config_option_bool(option_name="telegramtradesonly", option_default=False, store_name="telegramtradesonly", store_invert=False)
    config_option_bool(option_name="telegramerrormsgs", option_default=False, store_name="disabletelegramerrormsgs", store_invert=True)
    config_option_bool(option_name="stats", option_default=False, store_name="stats", store_invert=False)
    config_option_list(option_name="statgroup", option_default="", store_name="statgroup")
    config_option_date(option_name="statstartdate", option_default=None, store_name="statstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_bool(option_name="statdetail", option_default=False, store_name="statdetail", store_invert=False)
    config_option_bool(option_name="log", option_default=True, store_name="disablelog", store_invert=True)
    config_option_bool(option_name="smartswitch", option_default=False, store_name="smart_switch", store_invert=False)
    config_option_bool(option_name="tradetracker", option_default=False, store_name="disabletracker", store_invert=True)
    config_option_bool(option_name="autorestart", option_default=False, store_name="autorestart", store_invert=False)
    config_option_bool(option_name="websocket", option_default=False, store_name="websocket", store_invert=False)
    config_option_bool(option_name="insufficientfundslogging", option_default=False, store_name="enableinsufficientfundslogging", store_invert=False)
    config_option_bool(option_name="logbuysellinjson", option_default=False, store_name="logbuysellinjson", store_invert=False)
    config_option_bool(option_name="manualtradesonly", option_default=False, store_name="manual_trades_only", store_invert=False)
    config_option_str(option_name="startmethod", option_default="standard", store_name="startmethod", valid_options=["scanner", "standard", "telegram"])
    config_option_int(option_name="recvwindow", option_default=5000, store_name="recv_window", value_min=5000, value_max=60000)
    config_option_str(option_name="lastaction", option_default=None, store_name="last_action", valid_options=["BUY", "SELL"])
    config_option_bool(option_name="kucoincache", option_default=False, store_name="usekucoincache", store_invert=False)
    config_option_bool(option_name="exitaftersell", option_default=False, store_name="exitaftersell", store_invert=False)
    config_option_int(option_name="adjusttotalperiods", option_default=300, store_name="adjusttotalperiods", value_min=200, value_max=500)
    config_option_float(option_name="buypercent", option_default=100, store_name="buypercent", value_min=0, value_max=100)
    config_option_float(option_name="sellpercent", option_default=100, store_name="sellpercent", value_min=0, value_max=100)
    config_option_float(option_name="sellupperpcnt", option_default=None, store_name="sell_upper_pcnt", value_min=0, value_max=100)
    config_option_float(option_name="selllowerpcnt", option_default=None, store_name="sell_lower_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="nosellmaxpcnt", option_default=None, store_name="nosellmaxpcnt", value_min=0, value_max=100)
    config_option_float(option_name="nosellminpcnt", option_default=None, store_name="nosellminpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="preventloss", option_default=False, store_name="preventloss", store_invert=False)
    config_option_float(option_name="preventlosstrigger", option_default=1.0, store_name="preventlosstrigger", value_min=0, value_max=100)
    config_option_float(option_name="preventlossmargin", option_default=0.1, store_name="preventlossmargin", value_min=0, value_max=100)
    config_option_bool(option_name="sellatloss", option_default=True, store_name="sellatloss", store_invert=False)
    config_option_bool(option_name="sellatresistance", option_default=False, store_name="sellatresistance", store_invert=False)
    config_option_bool(option_name="sellatfibonaccilow", option_default=False, store_name="disablefailsafefibonaccilow", store_invert=True)
    config_option_bool(option_name="bullonly", option_default=False, store_name="disablebullonly", store_invert=True)
    config_option_bool(option_name="profitbankreversal", option_default=False, store_name="disableprofitbankreversal", store_invert=True)
    config_option_float(option_name="trailingstoploss", option_default=0.0, store_name="trailing_stop_loss", value_min=-100, value_max=0)
    config_option_float(option_name="trailingstoplosstrigger", option_default=0.0, store_name="trailing_stop_loss_trigger", value_min=0, value_max=100)
    config_option_float(option_name="trailingsellpcnt", option_default=0.0, store_name="trailingsellpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="trailingimmediatesell", option_default=False, store_name="trailingimmediatesell", store_invert=False)
    config_option_float(option_name="trailingsellimmediatepcnt", option_default=0.0, store_name="trailingsellimmediatepcnt", value_min=-100, value_max=0)
    config_option_float(option_name="trailingsellbailoutpcnt", option_default=0.0, store_name="trailingsellbailoutpcnt", value_min=-100, value_max=100)
    config_option_bool(option_name="dynamictsl", option_default=False, store_name="dynamic_tsl", store_invert=False)
    config_option_float(option_name="tslmultiplier", option_default=1.1, store_name="tsl_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tsltriggermultiplier", option_default=1.1, store_name="tsl_trigger_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tslmaxpcnt", option_default=-5.0, store_name="tsl_max_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="buyminsize", option_default=0.0, store_name="buyminsize")
    config_option_float(option_name="buymaxsize", option_default=0.0, store_name="buymaxsize")
    config_option_bool(option_name="buylastsellsize", option_default=False, store_name="buylastsellsize", store_invert=False)
    config_option_bool(option_name="marketmultibuycheck", option_default=False, store_name="marketmultibuycheck", store_invert=False)
    config_option_bool(option_name="buynearhigh", option_default=True, store_name="disablebuynearhigh", store_invert=True)
    config_option_float(option_name="buynearhighpcnt", option_default=3.0, store_name="nobuynearhighpcnt", value_min=0, value_max=100)
    config_option_float(option_name="trailingbuypcnt", option_default=0.0, store_name="trailingbuypcnt", value_min=0, value_max=100)
    config_option_bool(option_name="trailingimmediatebuy", option_default=False, store_name="trailingimmediatebuy", store_invert=False)
    config_option_float(option_name="trailingbuyimmediatepcnt", option_default=0.0, store_name="trailingbuyimmediatepcnt", value_min=0, value_max=100)
    config_option_bool(option_name="selltriggeroverride", option_default=False, store_name="selltriggeroverride", store_invert=False)
    config_option_bool(option_name="ema1226", option_default=True, store_name="disablebuyema", store_invert=True)
    config_option_bool(option_name="macdsignal", option_default=True, store_name="disablebuymacd", store_invert=True)
    config_option_bool(option_name="obv", option_default=False, store_name="disablebuyobv", store_invert=True)
    config_option_bool(option_name="elderray", option_default=False, store_name="disablebuyelderray", store_invert=True)
    config_option_bool(option_name="bbands_s1", option_default=False, store_name="disablebuybbands_s1", store_invert=True)
    config_option_bool(option_name="bbands_s2", option_default=False, store_name="disablebuybbands_s2", store_invert=True)
class Granularity(Enum):
    """Candle granularity; each value tuple is (seconds, short, medium, pandas frequency)."""

    ONE_MINUTE = 60, "1m", "1min", "1T"
    FIVE_MINUTES = 300, "5m", "5min", "5T"
    FIFTEEN_MINUTES = 900, "15m", "15min", "15T"
    THIRTY_MINUTES = 1800, "30m", "30min", "30T"
    ONE_HOUR = 3600, "1h", "1hour", "1H"
    SIX_HOURS = 21600, "6h", "6hour", "6H"
    ONE_DAY = 86400, "1d", "1day", "1D"

    def __init__(self, integer, short, medium, frequency):
        # unpack the value tuple into named attributes
        self.integer = integer
        self.short = short
        self.medium = medium
        self.frequency = frequency

    @staticmethod  # was a bare function; would mis-bind if accessed via a member
    def convert_to_enum(value):
        """Return the Granularity whose seconds, short, medium or frequency form equals ``value``.

        Raises ValueError when nothing matches.
        """
        for granularity in Granularity:
            if value in granularity.value:
                return granularity
        raise ValueError("Invalid Granularity")

    def to_short(self):
        """Return the short representation, e.g. "1m"."""
        return self.short

    def to_integer(self):
        """Return the granularity in seconds."""
        return self.integer

    def to_medium(self):
        """Return the medium representation, e.g. "1min"."""
        return self.medium

    def get_frequency(self):
        """Return the pandas offset alias, e.g. "1T"."""
        return self.frequency
def parser(app, coinbase_config, args=None):
    """Parse the "coinbase" section of the configuration onto ``app``.

    Performs a one-off migration of inline API credentials into a
    ``coinbase.key`` file, loads and validates the API key, secret and URL,
    then delegates the remaining options to default_config_parse().

    Raises:
        Exception: when no app instance is supplied.
        TypeError / ValueError: when a credential or option is malformed.
        RuntimeError: when the key file cannot be read.
    """
    if not app:
        raise Exception("No app is passed")

    if args is None:  # avoid a shared mutable default argument
        args = {}

    if isinstance(coinbase_config, dict):
        # one-off migration: move inline credentials into 'coinbase.key'
        # (requires BOTH values; the previous `or` check crashed with a
        # KeyError when only one of them was present)
        if "api_key" in coinbase_config and "api_secret" in coinbase_config:
            print(">>> migrating api keys to coinbase.key <<<\n")

            # create 'coinbase.key'
            with open("coinbase.key", "w") as fh:
                fh.write(f"{coinbase_config['api_key']}\n{coinbase_config['api_secret']}")

            if os.path.isfile("config.json") and os.path.isfile("coinbase.key"):
                # swap the inline credentials for a key-file reference
                del coinbase_config["api_key"]
                del coinbase_config["api_secret"]
                coinbase_config["api_key_file"] = "coinbase.key"

                # rewrite the 'coinbase' element of config.json
                # NOTE(review): the file is parsed with ast.literal_eval
                # (Python-literal style), not json.load -- kept as-is
                with open("config.json", "r") as fh:
                    config_json = ast.literal_eval(fh.read())
                config_json["coinbase"] = coinbase_config
                with open("config.json", "w") as fh:
                    fh.write(json.dumps(config_json, indent=4))
            else:
                # consistent with the binance parser's migration handling
                print("migration failed (io error)\n")

        # key file: CLI argument wins over the config file entry
        app.api_key_file = "coinbase.key"
        if "api_key_file" in args and args["api_key_file"] is not None:
            app.api_key_file = args["api_key_file"]
        elif "api_key_file" in coinbase_config:
            app.api_key_file = coinbase_config["api_key_file"]

        if app.api_key_file is not None:
            if not os.path.isfile(app.api_key_file):
                # fatal: keep the exact message the raise-and-catch-self
                # construct used to print, then exit
                print(f'Exception: Unable to read {app.api_key_file}, please check the file exists and is readable. Remove "api_key_file" key from the config file for test mode!\n')
                sys.exit(1)
            try:
                with open(app.api_key_file, "r") as f:
                    key = f.readline().strip()
                    secret = f.readline().strip()
                coinbase_config["api_key"] = key
                coinbase_config["api_secret"] = secret
            except Exception:
                raise RuntimeError(f"Unable to read {app.api_key_file}")

        # allow api_url to be supplied in the nested "config" section
        # (guarded: the unconditional lookup raised KeyError when the
        # "config" key was absent)
        if "config" in coinbase_config and "api_url" in coinbase_config["config"]:
            coinbase_config["api_url"] = coinbase_config["config"]["api_url"]

        if "api_key" in coinbase_config and "api_secret" in coinbase_config and "api_url" in coinbase_config:
            # validates the api key is syntactically correct
            # (was [A-z0-9], which also matched '[', ']', '^', '_' and '`')
            p = re.compile(r"^[A-Za-z0-9]{16}$")
            if not p.match(coinbase_config["api_key"]):
                raise TypeError("Coinbase API key is invalid")
            app.api_key = coinbase_config["api_key"]

            # validates the api secret is syntactically correct
            p = re.compile(r"^[A-Za-z0-9]{32}$")
            if not p.match(coinbase_config["api_secret"]):
                raise TypeError("Coinbase API secret is invalid")
            app.api_secret = coinbase_config["api_secret"]

            # validate Coinbase API URL
            valid_urls = [
                "https://api.coinbase.com/",
                "https://api.coinbase.com",
            ]
            if coinbase_config["api_url"] not in valid_urls:
                raise ValueError("Coinbase API URL is invalid")
            app.api_url = coinbase_config["api_url"]
    else:
        coinbase_config = {}

    config = merge_config_and_args(coinbase_config, args)
    default_config_parse(app, config)

    if "base_currency" in config and config["base_currency"] is not None:
        if not is_currency_valid(config["base_currency"]):
            raise TypeError("Base currency is invalid.")
        app.base_currency = config["base_currency"]

    if "quote_currency" in config and config["quote_currency"] is not None:
        if not is_currency_valid(config["quote_currency"]):
            raise TypeError("Quote currency is invalid.")
        app.quote_currency = config["quote_currency"]

    if "market" in config and config["market"] is not None:
        app.market, app.base_currency, app.quote_currency = parse_market(config["market"])

    if app.base_currency != "" and app.quote_currency != "":
        app.market = app.base_currency + "-" + app.quote_currency

    if "granularity" in config and config["granularity"] is not None:
        if isinstance(config["granularity"], str) and config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))
        elif isinstance(config["granularity"], int):
            app.granularity = Granularity.convert_to_enum(config["granularity"])
168,159 | import ast
import json
import os.path
import re
import sys
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
def parse_market(market):
    """Split a Binance market symbol into (market, base_currency, quote_currency).

    The quote currency is identified by matching the symbol's suffix against
    the known Binance quote currencies.

    Raises:
        ValueError: if the market fails validation or no known quote
            currency terminates the symbol.
    """
    if not is_market_valid(market):
        raise ValueError(f"Binance market invalid: {market}")

    # known Binance quote currencies (duplicate "NGN" removed)
    quote_currencies = (
        "BTC",
        "BNB",
        "ETH",
        "USDT",
        "TUSD",
        "DAX",
        "NGN",
        "RUB",
        "TRY",
        "EUR",
        "GBP",
        "ZAR",
        "UAH",
        "DAI",
        "BIDR",
        "AUD",
        "USD",
        "BRL",
        "BVND",
        "VAI",
    )

    for quote_currency in quote_currencies:
        if market.endswith(quote_currency):
            # slice the suffix off; str.replace() would strip *every*
            # occurrence and corrupt bases that contain the quote symbol
            base_currency = market[: -len(quote_currency)]
            return market, base_currency, quote_currency

    # previously an unmatched 6-character symbol silently fell through to
    # the placeholder defaults ("BTC"/"GBP"); now every unmatched symbol raises
    raise ValueError("Binance market error.")
def merge_config_and_args(exchange_config, args):
    """Combine the exchange file configuration with CLI arguments; CLI wins.

    Argument values of None or False are treated as "not supplied" and are
    skipped; all other values (including 0 and "") override the file entry.
    """
    merged = {}
    if exchange_config.get("config") is not None:
        merged = {**exchange_config["config"]}
    for key, value in args.items():
        if value is None or value is False:
            continue
        merged[key] = value
    return merged
def is_currency_valid(currency):
    """Return True when ``currency`` looks like a valid currency code.

    A valid code is 1-20 characters drawn from digits and uppercase letters.
    """
    # bool() so callers get a real boolean instead of a re.Match / None
    return bool(re.match(r"^[0-9A-Z]{1,20}$", currency))
def default_config_parse(app, config):
    """
    Parse the exchange-agnostic bot options from ``config`` and store each
    value as an attribute on ``app``.

    Requirements for bot options:
    - Update _generate_banner() in controllers/PyCryptoBot.py
    - Update the command line arguments below
    - Update the config parser in models/config/default_parser.py
    """

    def config_option_int(option_name: str = None, option_default: int = 0, store_name: str = None, value_min: int = None, value_max: int = None) -> bool:
        """Store an integer option on app; bounds-checked when both limits are given."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], int):
            raise TypeError(f"{option_name} must be a number")
        if value_min is not None and value_max is not None and not (value_min <= config[option_name] <= value_max):
            # TypeError (not ValueError) kept for backward compatibility
            raise TypeError(f"{option_name} is out of bounds")
        setattr(app, store_name, int(config[option_name]))
        return True

    def config_option_float(
        option_name: str = None, option_default: float = 0.0, store_name: str = None, value_min: float = None, value_max: float = None
    ) -> bool:
        """Store a numeric option on app as float; bounds-checked when both limits are given."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], (int, float)):
            raise TypeError(f"{option_name} must be a number")
        if value_min is not None and value_max is not None and not (value_min <= config[option_name] <= value_max):
            raise TypeError(f"{option_name} is out of bounds")
        setattr(app, store_name, float(config[option_name]))
        return True

    def config_option_bool(option_name: str = None, option_default: bool = True, store_name: str = None, store_invert: bool = False) -> bool:
        """Store a 0/1 option on app as bool; ``store_invert`` maps enable-flags onto legacy "disable*" attributes."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
            store_invert = False  # legacy config does not need to be inverted
        if option_name in config:
            if isinstance(config[option_name], int):  # NB: bool is a subclass of int
                if config[option_name] in [0, 1]:
                    setattr(app, store_name, bool(not config[option_name]) if store_invert else bool(config[option_name]))
                else:
                    raise TypeError(f"{option_name} must be of type int (0 or 1)")
            # NOTE(review): a non-int value is silently ignored here and the
            # attribute is left unset -- kept as-is for backward compatibility
        else:
            if store_invert is True:
                setattr(app, store_name, (not option_default))  # default (if inverted - disabled)
            else:
                setattr(app, store_name, option_default)  # default
        return True

    def config_option_list(option_name: str = None, option_default: str = "", store_name: str = None) -> bool:
        """Store a list option on app verbatim."""
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], list):
            raise TypeError(f"{option_name} must be a list")
        setattr(app, store_name, config[option_name])
        return True

    def config_option_str(option_name: str = None, option_default: str = "", store_name: str = None, valid_options=(), disable_variable=None) -> bool:
        """Store a string option on app when it is one of ``valid_options``.

        When the option is present and valid, ``disable_variable`` (if given)
        is reset to 0. ``valid_options`` defaults to an immutable empty tuple
        (was a mutable list default).
        """
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], str):
            raise TypeError(f"{option_name} must be a string")
        if config[option_name] not in valid_options:
            raise TypeError(f"{option_name} is not a valid option")
        setattr(app, store_name, config[option_name])
        if disable_variable is not None:
            setattr(app, disable_variable, 0)
        return True

    def config_option_date(
        option_name: str = None, option_default: str = "", store_name: str = None, date_format: str = "%Y-%m-%d", allow_now: bool = False
    ) -> bool:
        """Store a date-string option on app after validating it against ``date_format``.

        With ``allow_now``, the literal value "now" becomes today's date.
        NOTE(review): relies on a module-level ``datetime`` import -- confirm
        it exists in this module.
        """
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name not in config:
            setattr(app, store_name, option_default)  # default
            return True
        if not isinstance(config[option_name], str):
            raise TypeError(f"{option_name} must be a date: {date_format}")
        if allow_now is True and config[option_name] == "now":
            setattr(app, store_name, str(datetime.today().strftime("%Y-%m-%d")))
            return True
        try:
            datetime.strptime(config[option_name], date_format)
        except ValueError:
            raise ValueError(f"Incorrect data format, should be {date_format}")
        setattr(app, store_name, str(config[option_name]))
        return True

    # bespoke options with non-standard logic
    # NOTE(review): ``Exchange``, ``Granularity``, ``datetime`` and
    # ``get_terminal_size`` are assumed to be imported at module level.
    if "market" in config and config["market"] is not None:
        if app.exchange == Exchange.BINANCE:
            p = re.compile(r"^[0-9A-Z]{4,25}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default market for Binance
                app.market = "BTCGBP"
        else:
            if app.exchange != Exchange.COINBASE and app.exchange != Exchange.COINBASEPRO and app.exchange != Exchange.KUCOIN:
                # default if no exchange set
                app.exchange = Exchange.COINBASEPRO
            # binance and kucoin
            p = re.compile(r"^[0-9A-Z]{1,20}\-[1-9A-Z]{2,5}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default for coinbase pro and binance
                app.market = "BTC-GBP"

    if "granularity" in config and config["granularity"] is not None:
        app.smart_switch = 0  # an explicit granularity disables smart switching
        if isinstance(config["granularity"], str) and not config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(config["granularity"])
        else:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))

    # standard options
    try:
        term_width = get_terminal_size().columns
    except OSError:
        term_width = 180

    config_option_bool(option_name="debug", option_default=False, store_name="debug", store_invert=False)
    config_option_bool(option_name="termcolor", option_default=True, store_name="term_color", store_invert=False)
    config_option_int(option_name="termwidth", option_default=term_width, store_name="term_width", value_min=60, value_max=420)
    config_option_int(option_name="logwidth", option_default=180, store_name="log_width", value_min=60, value_max=420)
    config_option_bool(option_name="live", option_default=False, store_name="is_live", store_invert=False)
    config_option_bool(option_name="graphs", option_default=False, store_name="save_graphs", store_invert=False)
    # option_default=0 (not "") kept for backward compatibility
    config_option_str(
        option_name="sim", option_default=0, store_name="is_sim", valid_options=["slow", "fast", "slow-sample", "fast-sample"], disable_variable="is_live"
    )
    config_option_date(option_name="simstartdate", option_default=None, store_name="simstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_date(option_name="simenddate", option_default=None, store_name="simenddate", date_format="%Y-%m-%d", allow_now=True)
    config_option_bool(option_name="simresultonly", option_default=False, store_name="simresultonly", store_invert=False)
    config_option_bool(option_name="telegram", option_default=False, store_name="disabletelegram", store_invert=True)
    config_option_bool(option_name="telegrambotcontrol", option_default=False, store_name="telegrambotcontrol", store_invert=False)
    config_option_bool(option_name="telegramtradesonly", option_default=False, store_name="telegramtradesonly", store_invert=False)
    config_option_bool(option_name="telegramerrormsgs", option_default=False, store_name="disabletelegramerrormsgs", store_invert=True)
    config_option_bool(option_name="stats", option_default=False, store_name="stats", store_invert=False)
    config_option_list(option_name="statgroup", option_default="", store_name="statgroup")
    config_option_date(option_name="statstartdate", option_default=None, store_name="statstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_bool(option_name="statdetail", option_default=False, store_name="statdetail", store_invert=False)
    config_option_bool(option_name="log", option_default=True, store_name="disablelog", store_invert=True)
    config_option_bool(option_name="smartswitch", option_default=False, store_name="smart_switch", store_invert=False)
    config_option_bool(option_name="tradetracker", option_default=False, store_name="disabletracker", store_invert=True)
    config_option_bool(option_name="autorestart", option_default=False, store_name="autorestart", store_invert=False)
    config_option_bool(option_name="websocket", option_default=False, store_name="websocket", store_invert=False)
    config_option_bool(option_name="insufficientfundslogging", option_default=False, store_name="enableinsufficientfundslogging", store_invert=False)
    config_option_bool(option_name="logbuysellinjson", option_default=False, store_name="logbuysellinjson", store_invert=False)
    config_option_bool(option_name="manualtradesonly", option_default=False, store_name="manual_trades_only", store_invert=False)
    config_option_str(option_name="startmethod", option_default="standard", store_name="startmethod", valid_options=["scanner", "standard", "telegram"])
    config_option_int(option_name="recvwindow", option_default=5000, store_name="recv_window", value_min=5000, value_max=60000)
    config_option_str(option_name="lastaction", option_default=None, store_name="last_action", valid_options=["BUY", "SELL"])
    config_option_bool(option_name="kucoincache", option_default=False, store_name="usekucoincache", store_invert=False)
    config_option_bool(option_name="exitaftersell", option_default=False, store_name="exitaftersell", store_invert=False)
    config_option_int(option_name="adjusttotalperiods", option_default=300, store_name="adjusttotalperiods", value_min=200, value_max=500)
    config_option_float(option_name="buypercent", option_default=100, store_name="buypercent", value_min=0, value_max=100)
    config_option_float(option_name="sellpercent", option_default=100, store_name="sellpercent", value_min=0, value_max=100)
    config_option_float(option_name="sellupperpcnt", option_default=None, store_name="sell_upper_pcnt", value_min=0, value_max=100)
    config_option_float(option_name="selllowerpcnt", option_default=None, store_name="sell_lower_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="nosellmaxpcnt", option_default=None, store_name="nosellmaxpcnt", value_min=0, value_max=100)
    config_option_float(option_name="nosellminpcnt", option_default=None, store_name="nosellminpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="preventloss", option_default=False, store_name="preventloss", store_invert=False)
    config_option_float(option_name="preventlosstrigger", option_default=1.0, store_name="preventlosstrigger", value_min=0, value_max=100)
    config_option_float(option_name="preventlossmargin", option_default=0.1, store_name="preventlossmargin", value_min=0, value_max=100)
    config_option_bool(option_name="sellatloss", option_default=True, store_name="sellatloss", store_invert=False)
    config_option_bool(option_name="sellatresistance", option_default=False, store_name="sellatresistance", store_invert=False)
    config_option_bool(option_name="sellatfibonaccilow", option_default=False, store_name="disablefailsafefibonaccilow", store_invert=True)
    config_option_bool(option_name="bullonly", option_default=False, store_name="disablebullonly", store_invert=True)
    config_option_bool(option_name="profitbankreversal", option_default=False, store_name="disableprofitbankreversal", store_invert=True)
    config_option_float(option_name="trailingstoploss", option_default=0.0, store_name="trailing_stop_loss", value_min=-100, value_max=0)
    config_option_float(option_name="trailingstoplosstrigger", option_default=0.0, store_name="trailing_stop_loss_trigger", value_min=0, value_max=100)
    config_option_float(option_name="trailingsellpcnt", option_default=0.0, store_name="trailingsellpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="trailingimmediatesell", option_default=False, store_name="trailingimmediatesell", store_invert=False)
    config_option_float(option_name="trailingsellimmediatepcnt", option_default=0.0, store_name="trailingsellimmediatepcnt", value_min=-100, value_max=0)
    config_option_float(option_name="trailingsellbailoutpcnt", option_default=0.0, store_name="trailingsellbailoutpcnt", value_min=-100, value_max=100)
    config_option_bool(option_name="dynamictsl", option_default=False, store_name="dynamic_tsl", store_invert=False)
    config_option_float(option_name="tslmultiplier", option_default=1.1, store_name="tsl_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tsltriggermultiplier", option_default=1.1, store_name="tsl_trigger_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tslmaxpcnt", option_default=-5.0, store_name="tsl_max_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="buyminsize", option_default=0.0, store_name="buyminsize")
    config_option_float(option_name="buymaxsize", option_default=0.0, store_name="buymaxsize")
    config_option_bool(option_name="buylastsellsize", option_default=False, store_name="buylastsellsize", store_invert=False)
    config_option_bool(option_name="marketmultibuycheck", option_default=False, store_name="marketmultibuycheck", store_invert=False)
    config_option_bool(option_name="buynearhigh", option_default=True, store_name="disablebuynearhigh", store_invert=True)
    config_option_float(option_name="buynearhighpcnt", option_default=3.0, store_name="nobuynearhighpcnt", value_min=0, value_max=100)
    config_option_float(option_name="trailingbuypcnt", option_default=0.0, store_name="trailingbuypcnt", value_min=0, value_max=100)
    config_option_bool(option_name="trailingimmediatebuy", option_default=False, store_name="trailingimmediatebuy", store_invert=False)
    config_option_float(option_name="trailingbuyimmediatepcnt", option_default=0.0, store_name="trailingbuyimmediatepcnt", value_min=0, value_max=100)
    config_option_bool(option_name="selltriggeroverride", option_default=False, store_name="selltriggeroverride", store_invert=False)
    config_option_bool(option_name="ema1226", option_default=True, store_name="disablebuyema", store_invert=True)
    config_option_bool(option_name="macdsignal", option_default=True, store_name="disablebuymacd", store_invert=True)
    config_option_bool(option_name="obv", option_default=False, store_name="disablebuyobv", store_invert=True)
    config_option_bool(option_name="elderray", option_default=False, store_name="disablebuyelderray", store_invert=True)
    config_option_bool(option_name="bbands_s1", option_default=False, store_name="disablebuybbands_s1", store_invert=True)
    config_option_bool(option_name="bbands_s2", option_default=False, store_name="disablebuybbands_s2", store_invert=True)
def parser(app, binance_config, args=None):
    """
    Populate ``app`` with Binance-specific settings from ``binance_config``.

    Migrates legacy inline API credentials into a ``binance.key`` file,
    resolves the key file (command-line argument wins over config), reads
    the credentials, validates key/secret/URL, then delegates the common
    options to ``default_config_parse``.

    Raises TypeError/ValueError on malformed credentials; exits the process
    when a configured key file cannot be found.
    """
    # "args=None" avoids the shared mutable-default-argument pitfall of
    # the previous "args={}".
    if args is None:
        args = {}
    if not app:
        raise Exception("No app is passed")

    if isinstance(binance_config, dict):
        if "api_key" in binance_config or "api_secret" in binance_config:
            # One-time migration: move inline credentials into binance.key.
            print(">>> migrating api keys to binance.key <<<\n")
            with open("binance.key", "w") as fh:
                fh.write(f"{binance_config['api_key']}\n{binance_config['api_secret']}")
            if os.path.isfile("config.json") and os.path.isfile("binance.key"):
                # Replace the inline credentials with a pointer to the file.
                del binance_config["api_key"]
                del binance_config["api_secret"]
                binance_config["api_key_file"] = "binance.key"
                # rewrite the 'binance' element of config.json
                with open("config.json", "r") as fh:
                    config_json = ast.literal_eval(fh.read())
                config_json["binance"] = binance_config
                with open("config.json", "w") as fh:
                    fh.write(json.dumps(config_json, indent=4))
            else:
                print("migration failed (io error)\n")
            app.api_key_file = "binance.key"

        # Command-line argument takes precedence over the config entry.
        if "api_key_file" in args and args["api_key_file"] is not None:
            app.api_key_file = args["api_key_file"]
        elif "api_key_file" in binance_config:
            app.api_key_file = binance_config["api_key_file"]

        if app.api_key_file is not None:
            if not os.path.isfile(app.api_key_file):
                try:
                    raise Exception(f"Unable to read {app.api_key_file}, please check the file exists and is readable. Remove it from the config file for test mode!\n")
                except Exception as e:
                    print(f"{type(e).__name__}: {e}")
                    sys.exit(1)
            else:
                try:
                    # Key file layout: line 1 = API key, line 2 = API secret.
                    with open(app.api_key_file, "r") as f:
                        key = f.readline().strip()
                        secret = f.readline().strip()
                    binance_config["api_key"] = key
                    binance_config["api_secret"] = secret
                except Exception:
                    raise RuntimeError(f"Unable to read {app.api_key_file}")

    if "api_key" in binance_config and "api_secret" in binance_config and "api_url" in binance_config:
        # validates the api key is syntactically correct (64 alphanumerics);
        # "[A-Za-z0-9]" replaces the buggy "[A-z0-9]" range, which also
        # matched the punctuation characters between "Z" and "a"
        p = re.compile(r"^[A-Za-z0-9]{64}$")
        if not p.match(binance_config["api_key"]):
            raise TypeError("Binance API key is invalid")
        app.api_key = binance_config["api_key"]  # noqa: F841
        # validates the api secret is syntactically correct (same shape)
        if not p.match(binance_config["api_secret"]):
            raise TypeError("Binance API secret is invalid")
        app.api_secret = binance_config["api_secret"]  # noqa: F841
        valid_urls = [
            "https://api.binance.com/",
            "https://testnet.binance.vision/",
            "https://api.binance.com",
            "https://testnet.binance.vision",
            "https://api.binance.us",
        ]
        # validate Binance API
        if binance_config["api_url"] not in valid_urls:
            raise ValueError("Binance API URL is invalid")
        app.api_url = binance_config["api_url"]  # noqa: F841
        app.base_currency = "BTC"
        app.quote_currency = "GBP"
    else:
        # Incomplete credentials: fall back to an empty (test-mode) config.
        binance_config = {}

    config = merge_config_and_args(binance_config, args)
    default_config_parse(app, config)

    if "base_currency" in config and config["base_currency"] is not None:
        if not is_currency_valid(config["base_currency"]):
            raise TypeError("Base currency is invalid.")
        app.base_currency = config["base_currency"]
    if "quote_currency" in config and config["quote_currency"] is not None:
        if not is_currency_valid(config["quote_currency"]):
            raise TypeError("Quote currency is invalid.")
        app.quote_currency = config["quote_currency"]
    if "market" in config and config["market"] is not None:
        app.market, app.base_currency, app.quote_currency = parse_market(config["market"])
    if app.base_currency != "" and app.quote_currency != "":
        # Binance markets concatenate base and quote without a separator.
        app.market = app.base_currency + app.quote_currency  # noqa: F841
    if "use_sell_fee" in config:
        app.use_sell_fee = config["use_sell_fee"]  # noqa: F841
168,160 | import re
import ast
import json
import os.path
import sys
from .default_parser import is_currency_valid, default_config_parse, merge_config_and_args
from models.exchange.Granularity import Granularity
def parse_market(market):
    """
    Validate a Kucoin market string and split it into its parts.

    Returns a ``(market, base_currency, quote_currency)`` tuple; raises
    ``ValueError`` when ``is_market_valid`` rejects the input.
    """
    if not is_market_valid(market):
        raise ValueError("Kucoin market invalid: " + market)
    base, quote = market.split("-", 2)
    return market, base, quote
def merge_config_and_args(exchange_config, args):
    """
    Merge an exchange "config" sub-dict with command-line arguments.

    Argument values override config-file values. Args that are ``None`` or
    literally ``False`` are treated as "not supplied" and skipped; note the
    checks are by identity, so a value of ``0`` is kept.
    """
    merged = {}
    if exchange_config.get("config") is not None:
        merged.update(exchange_config["config"])
    overrides = {key: val for key, val in args.items() if val is not None and val is not False}
    merged.update(overrides)
    return merged
def is_currency_valid(currency):
    """Return a truthy match when *currency* is 1-20 uppercase alphanumerics, else None."""
    return re.match(r"^[0-9A-Z]{1,20}$", currency)
def default_config_parse(app, config):
    """
    Parse the exchange-agnostic bot options from ``config`` onto ``app``.

    Requirements for bot options:
    - Update _generate_banner() in controllers/PyCryptoBot.py
    - Update the command line arguments below
    - Update the config parser in models/config/default_parser.py

    Each nested ``config_option_*`` helper reads one option (preferring the
    legacy key — the store name — when present), validates its type and
    bounds, and stores it on ``app`` via ``setattr``, falling back to the
    supplied default when the option is absent.
    """

    def config_option_int(option_name: str = None, option_default: int = 0, store_name: str = None, value_min: int = None, value_max: int = None) -> bool:
        # Validate and store an integer option; bounds are enforced only
        # when both value_min and value_max are supplied.
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], int):
                if value_min is not None and value_max is not None:
                    if config[option_name] >= value_min and config[option_name] <= value_max:
                        setattr(app, store_name, int(config[option_name]))
                    else:
                        raise TypeError(f"{option_name} is out of bounds")
                else:
                    setattr(app, store_name, int(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a number")
        else:
            setattr(app, store_name, option_default)  # default
        return True

    def config_option_float(
        option_name: str = None, option_default: float = 0.0, store_name: str = None, value_min: float = None, value_max: float = None
    ) -> bool:
        # Validate and store a float option (ints are accepted and coerced).
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], int) or isinstance(config[option_name], float):
                if value_min is not None and value_max is not None:
                    if config[option_name] >= value_min and config[option_name] <= value_max:
                        setattr(app, store_name, float(config[option_name]))
                    else:
                        raise TypeError(f"{option_name} is out of bounds")
                else:
                    setattr(app, store_name, float(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a number")
        else:
            setattr(app, store_name, option_default)  # default
        return True

    def config_option_bool(option_name: str = None, option_default: bool = True, store_name: str = None, store_invert: bool = False) -> bool:
        # Validate and store a boolean option given as int 0/1. Some options
        # are stored inverted (e.g. "ema1226" -> "disablebuyema").
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
            store_invert = False  # legacy config does not need to be inverted
        if option_name in config:
            if isinstance(config[option_name], int):
                if config[option_name] in [0, 1]:
                    if store_invert is True:
                        setattr(app, store_name, bool(not config[option_name]))
                    else:
                        setattr(app, store_name, bool(config[option_name]))
                else:
                    raise TypeError(f"{option_name} must be of type int (0 or 1)")
        else:
            # option absent: fall back to the default
            if store_invert is True:
                setattr(app, store_name, (not option_default))  # default (if inverted - disabled)
            else:
                setattr(app, store_name, option_default)  # default
        return True

    def config_option_list(option_name: str = None, option_default: str = "", store_name: str = None) -> bool:
        # Validate and store a list-valued option.
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], list):
                setattr(app, store_name, config[option_name])
            else:
                raise TypeError(f"{option_name} must be a list")
        else:
            setattr(app, store_name, option_default)  # default
        return True

    def config_option_str(option_name: str = None, option_default: str = "", store_name: str = None, valid_options: list = None, disable_variable=None) -> bool:
        # Validate and store a string option restricted to valid_options.
        # Setting the option may also clear a related flag via
        # disable_variable (e.g. enabling "sim" turns off "is_live").
        if option_name is None or store_name is None:
            return False
        if valid_options is None:  # avoid a shared mutable default argument
            valid_options = []
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], str):
                if config[option_name] in valid_options:
                    setattr(app, store_name, config[option_name])
                    if disable_variable is not None:
                        setattr(app, disable_variable, 0)
                else:
                    raise TypeError(f"{option_name} is not a valid option")
            else:
                raise TypeError(f"{option_name} must be a string")
        else:
            setattr(app, store_name, option_default)  # default
        return True

    def config_option_date(
        option_name: str = None, option_default: str = "", store_name: str = None, date_format: str = "%Y-%m-%d", allow_now: bool = False
    ) -> bool:
        # Validate and store a date-string option; the literal "now" is
        # accepted when allow_now is set and resolves to today's date.
        # NOTE(review): relies on ``datetime`` being imported at module
        # level — confirm against the file header.
        if option_name is None or store_name is None:
            return False
        if store_name in config:
            option_name = store_name  # prefer legacy config if it exists
        if option_name in config:
            if isinstance(config[option_name], str):
                if allow_now is True and config[option_name] == "now":
                    setattr(app, store_name, str(datetime.today().strftime("%Y-%m-%d")))
                else:
                    try:
                        datetime.strptime(config[option_name], date_format)
                    except ValueError:
                        raise ValueError(f"Incorrect data format, should be {date_format}")
                    setattr(app, store_name, str(config[option_name]))
            else:
                raise TypeError(f"{option_name} must be a date: {date_format}")
        else:
            setattr(app, store_name, option_default)  # default
        return True

    # bespoke options with non-standard logic
    if "market" in config and config["market"] is not None:
        # NOTE(review): ``Exchange`` and ``Granularity`` must be imported at
        # module level for these branches — confirm against the file header.
        if app.exchange == Exchange.BINANCE:
            p = re.compile(r"^[0-9A-Z]{4,25}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default market for Binance
                app.market = "BTCGBP"
        else:
            if app.exchange != Exchange.COINBASE and app.exchange != Exchange.COINBASEPRO and app.exchange != Exchange.KUCOIN:
                # default if no exchange set
                app.exchange = Exchange.COINBASEPRO
            # binance and kucoin
            p = re.compile(r"^[0-9A-Z]{1,20}\-[1-9A-Z]{2,5}$")
            if p.match(config["market"]):
                app.market = config["market"]
            else:
                # default for coinbase pro and binance
                app.market = "BTC-GBP"

    if "granularity" in config and config["granularity"] is not None:
        app.smart_switch = 0  # an explicit granularity disables smart switching
        if isinstance(config["granularity"], str) and not config["granularity"].isnumeric():
            app.granularity = Granularity.convert_to_enum(config["granularity"])
        else:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))

    # standard options
    try:
        term_width = get_terminal_size().columns
    except OSError:
        term_width = 180  # no controlling terminal (e.g. running as a service)

    config_option_bool(option_name="debug", option_default=False, store_name="debug", store_invert=False)
    config_option_bool(option_name="termcolor", option_default=True, store_name="term_color", store_invert=False)
    config_option_int(option_name="termwidth", option_default=term_width, store_name="term_width", value_min=60, value_max=420)
    config_option_int(option_name="logwidth", option_default=180, store_name="log_width", value_min=60, value_max=420)
    config_option_bool(option_name="live", option_default=False, store_name="is_live", store_invert=False)
    config_option_bool(option_name="graphs", option_default=False, store_name="save_graphs", store_invert=False)
    config_option_str(
        option_name="sim", option_default=0, store_name="is_sim", valid_options=["slow", "fast", "slow-sample", "fast-sample"], disable_variable="is_live"
    )
    config_option_date(option_name="simstartdate", option_default=None, store_name="simstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_date(option_name="simenddate", option_default=None, store_name="simenddate", date_format="%Y-%m-%d", allow_now=True)
    config_option_bool(option_name="simresultonly", option_default=False, store_name="simresultonly", store_invert=False)
    config_option_bool(option_name="telegram", option_default=False, store_name="disabletelegram", store_invert=True)
    config_option_bool(option_name="telegrambotcontrol", option_default=False, store_name="telegrambotcontrol", store_invert=False)
    config_option_bool(option_name="telegramtradesonly", option_default=False, store_name="telegramtradesonly", store_invert=False)
    config_option_bool(option_name="telegramerrormsgs", option_default=False, store_name="disabletelegramerrormsgs", store_invert=True)
    config_option_bool(option_name="stats", option_default=False, store_name="stats", store_invert=False)
    config_option_list(option_name="statgroup", option_default="", store_name="statgroup")
    config_option_date(option_name="statstartdate", option_default=None, store_name="statstartdate", date_format="%Y-%m-%d", allow_now=False)
    config_option_bool(option_name="statdetail", option_default=False, store_name="statdetail", store_invert=False)
    config_option_bool(option_name="log", option_default=True, store_name="disablelog", store_invert=True)
    config_option_bool(option_name="smartswitch", option_default=False, store_name="smart_switch", store_invert=False)
    config_option_bool(option_name="tradetracker", option_default=False, store_name="disabletracker", store_invert=True)
    config_option_bool(option_name="autorestart", option_default=False, store_name="autorestart", store_invert=False)
    config_option_bool(option_name="websocket", option_default=False, store_name="websocket", store_invert=False)
    config_option_bool(option_name="insufficientfundslogging", option_default=False, store_name="enableinsufficientfundslogging", store_invert=False)
    config_option_bool(option_name="logbuysellinjson", option_default=False, store_name="logbuysellinjson", store_invert=False)
    config_option_bool(option_name="manualtradesonly", option_default=False, store_name="manual_trades_only", store_invert=False)
    config_option_str(option_name="startmethod", option_default="standard", store_name="startmethod", valid_options=["scanner", "standard", "telegram"])
    config_option_int(option_name="recvwindow", option_default=5000, store_name="recv_window", value_min=5000, value_max=60000)
    config_option_str(option_name="lastaction", option_default=None, store_name="last_action", valid_options=["BUY", "SELL"])
    config_option_bool(option_name="kucoincache", option_default=False, store_name="usekucoincache", store_invert=False)
    config_option_bool(option_name="exitaftersell", option_default=False, store_name="exitaftersell", store_invert=False)
    config_option_int(option_name="adjusttotalperiods", option_default=300, store_name="adjusttotalperiods", value_min=200, value_max=500)
    config_option_float(option_name="buypercent", option_default=100, store_name="buypercent", value_min=0, value_max=100)
    config_option_float(option_name="sellpercent", option_default=100, store_name="sellpercent", value_min=0, value_max=100)
    config_option_float(option_name="sellupperpcnt", option_default=None, store_name="sell_upper_pcnt", value_min=0, value_max=100)
    config_option_float(option_name="selllowerpcnt", option_default=None, store_name="sell_lower_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="nosellmaxpcnt", option_default=None, store_name="nosellmaxpcnt", value_min=0, value_max=100)
    config_option_float(option_name="nosellminpcnt", option_default=None, store_name="nosellminpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="preventloss", option_default=False, store_name="preventloss", store_invert=False)
    config_option_float(option_name="preventlosstrigger", option_default=1.0, store_name="preventlosstrigger", value_min=0, value_max=100)
    config_option_float(option_name="preventlossmargin", option_default=0.1, store_name="preventlossmargin", value_min=0, value_max=100)
    config_option_bool(option_name="sellatloss", option_default=True, store_name="sellatloss", store_invert=False)
    config_option_bool(option_name="sellatresistance", option_default=False, store_name="sellatresistance", store_invert=False)
    config_option_bool(option_name="sellatfibonaccilow", option_default=False, store_name="disablefailsafefibonaccilow", store_invert=True)
    config_option_bool(option_name="bullonly", option_default=False, store_name="disablebullonly", store_invert=True)
    config_option_bool(option_name="profitbankreversal", option_default=False, store_name="disableprofitbankreversal", store_invert=True)
    config_option_float(option_name="trailingstoploss", option_default=0.0, store_name="trailing_stop_loss", value_min=-100, value_max=0)
    config_option_float(option_name="trailingstoplosstrigger", option_default=0.0, store_name="trailing_stop_loss_trigger", value_min=0, value_max=100)
    config_option_float(option_name="trailingsellpcnt", option_default=0.0, store_name="trailingsellpcnt", value_min=-100, value_max=0)
    config_option_bool(option_name="trailingimmediatesell", option_default=False, store_name="trailingimmediatesell", store_invert=False)
    config_option_float(option_name="trailingsellimmediatepcnt", option_default=0.0, store_name="trailingsellimmediatepcnt", value_min=-100, value_max=0)
    config_option_float(option_name="trailingsellbailoutpcnt", option_default=0.0, store_name="trailingsellbailoutpcnt", value_min=-100, value_max=100)
    config_option_bool(option_name="dynamictsl", option_default=False, store_name="dynamic_tsl", store_invert=False)
    config_option_float(option_name="tslmultiplier", option_default=1.1, store_name="tsl_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tsltriggermultiplier", option_default=1.1, store_name="tsl_trigger_multiplier", value_min=0, value_max=100)
    config_option_float(option_name="tslmaxpcnt", option_default=-5.0, store_name="tsl_max_pcnt", value_min=-100, value_max=0)
    config_option_float(option_name="buyminsize", option_default=0.0, store_name="buyminsize")
    config_option_float(option_name="buymaxsize", option_default=0.0, store_name="buymaxsize")
    config_option_bool(option_name="buylastsellsize", option_default=False, store_name="buylastsellsize", store_invert=False)
    config_option_bool(option_name="marketmultibuycheck", option_default=False, store_name="marketmultibuycheck", store_invert=False)
    config_option_bool(option_name="buynearhigh", option_default=True, store_name="disablebuynearhigh", store_invert=True)
    config_option_float(option_name="buynearhighpcnt", option_default=3.0, store_name="nobuynearhighpcnt", value_min=0, value_max=100)
    config_option_float(option_name="trailingbuypcnt", option_default=0.0, store_name="trailingbuypcnt", value_min=0, value_max=100)
    config_option_bool(option_name="trailingimmediatebuy", option_default=False, store_name="trailingimmediatebuy", store_invert=False)
    config_option_float(option_name="trailingbuyimmediatepcnt", option_default=0.0, store_name="trailingbuyimmediatepcnt", value_min=0, value_max=100)
    config_option_bool(option_name="selltriggeroverride", option_default=False, store_name="selltriggeroverride", store_invert=False)
    config_option_bool(option_name="ema1226", option_default=True, store_name="disablebuyema", store_invert=True)
    config_option_bool(option_name="macdsignal", option_default=True, store_name="disablebuymacd", store_invert=True)
    config_option_bool(option_name="obv", option_default=False, store_name="disablebuyobv", store_invert=True)
    config_option_bool(option_name="elderray", option_default=False, store_name="disablebuyelderray", store_invert=True)
    config_option_bool(option_name="bbands_s1", option_default=False, store_name="disablebuybbands_s1", store_invert=True)
    config_option_bool(option_name="bbands_s2", option_default=False, store_name="disablebuybbands_s2", store_invert=True)
class Granularity(Enum):
    """
    Candle interval sizes supported by the bot.

    Each member's value bundles four equivalent spellings of the interval:
    seconds, a short code ("1m"), a medium code ("1min"), and a pandas
    frequency string ("1T").
    """

    ONE_MINUTE = 60, "1m", "1min", "1T"
    FIVE_MINUTES = 300, "5m", "5min", "5T"
    FIFTEEN_MINUTES = 900, "15m", "15min", "15T"
    THIRTY_MINUTES = 1800, "30m", "30min", "30T"
    ONE_HOUR = 3600, "1h", "1hour", "1H"
    SIX_HOURS = 21600, "6h", "6hour", "6H"
    ONE_DAY = 86400, "1d", "1day", "1D"

    def __init__(self, seconds, short_code, medium_code, pandas_freq):
        # Expose each spelling as a named attribute on the member.
        self.integer = seconds
        self.short = short_code
        self.medium = medium_code
        self.frequency = pandas_freq

    def convert_to_enum(value):
        # Plain function (not a bound method): called as
        # Granularity.convert_to_enum(value) with any of the four spellings.
        for member in Granularity:
            if value in member.value:
                return member
        raise ValueError("Invalid Granularity")

    def to_short(self):
        return self.short

    def to_integer(self):
        return self.integer

    def to_medium(self):
        return self.medium

    def get_frequency(self):
        return self.frequency
def parser(app, kucoin_config, args=None):
    """
    Populate ``app`` with Kucoin-specific settings from ``kucoin_config``.

    Migrates legacy inline API credentials into a ``kucoin.key`` file,
    resolves the key file (command-line argument wins over config), reads
    the credentials, validates key/secret/passphrase/URL, then delegates
    the common options to ``default_config_parse``.

    Fixes a bug where the validated API URL was assigned to a discarded
    local instead of ``app.api_url`` (cf. the Binance parser).
    """
    # "args=None" avoids the shared mutable-default-argument pitfall of
    # the previous "args={}".
    if args is None:
        args = {}
    if not app:
        raise Exception("No app is passed")

    if isinstance(kucoin_config, dict):
        if "api_key" in kucoin_config or "api_secret" in kucoin_config or "api_passphrase" in kucoin_config:
            # One-time migration: move inline credentials into kucoin.key.
            print(">>> migrating api keys to kucoin.key <<<", "\n")
            with open("kucoin.key", "w", encoding="utf8") as fh:
                fh.write(kucoin_config["api_key"] + "\n" + kucoin_config["api_secret"] + "\n" + kucoin_config["api_passphrase"])
            if os.path.isfile("config.json") and os.path.isfile("kucoin.key"):
                # Replace the inline credentials with a pointer to the file.
                del kucoin_config["api_key"]
                del kucoin_config["api_secret"]
                del kucoin_config["api_passphrase"]
                kucoin_config["api_key_file"] = "kucoin.key"
                # rewrite the 'kucoin' element of config.json
                with open("config.json", "r", encoding="utf8") as fh:
                    config_json = ast.literal_eval(fh.read())
                config_json["kucoin"] = kucoin_config
                # keep the write encoding consistent with the read above
                with open("config.json", "w", encoding="utf8") as fh:
                    fh.write(json.dumps(config_json, indent=4))
            else:
                print("migration failed (io error)", "\n")
            app.api_key_file = "kucoin.key"

        # Command-line argument takes precedence over the config entry.
        if "api_key_file" in args and args["api_key_file"] is not None:
            app.api_key_file = args["api_key_file"]
        elif "api_key_file" in kucoin_config:
            app.api_key_file = kucoin_config["api_key_file"]

        if app.api_key_file is not None:
            if not os.path.isfile(app.api_key_file):
                try:
                    raise Exception(f"Unable to read {app.api_key_file}, please check the file exists and is readable. Remove \"api_key_file\" key from the config file for test mode!\n")
                except Exception as e:
                    print(f"{type(e).__name__}: {e}")
                    sys.exit(1)
            else:
                try:
                    # Key file layout: key, secret, passphrase - one per line.
                    with open(app.api_key_file, "r") as f:
                        key = f.readline().strip()
                        secret = f.readline().strip()
                        password = f.readline().strip()
                    kucoin_config["api_key"] = key
                    kucoin_config["api_secret"] = secret
                    kucoin_config["api_passphrase"] = password
                except Exception:
                    raise RuntimeError(f"Unable to read {app.api_key_file}")

    if "api_key" in kucoin_config and "api_secret" in kucoin_config and "api_passphrase" in kucoin_config and "api_url" in kucoin_config:
        # validates the api key is syntactically correct (24 alphanumerics);
        # "[A-Za-z0-9]" replaces the buggy "[A-z0-9]" range, which also
        # matched the punctuation characters between "Z" and "a"
        p = re.compile(r"^[A-Za-z0-9]{24}$")
        if not p.match(kucoin_config["api_key"]):
            raise TypeError("Kucoin API key is invalid")
        app.api_key = kucoin_config["api_key"]  # noqa: F841
        # validates the api secret is syntactically correct (UUID-style)
        p = re.compile(r"^[A-Za-z0-9-]{36}$")
        if not p.match(kucoin_config["api_secret"]):
            raise TypeError("Kucoin API secret is invalid")
        app.api_secret = kucoin_config["api_secret"]  # noqa: F841
        # validates the api passphrase is syntactically correct
        p = re.compile(r"^[A-Za-z0-9#$%=@!{},`~&*()<>?.:;_|^/+\[\]]{8,32}$")
        if not p.match(kucoin_config["api_passphrase"]):
            raise TypeError("Kucoin API passphrase is invalid")
        app.api_passphrase = kucoin_config["api_passphrase"]  # noqa: F841
        valid_urls = [
            "https://api.kucoin.com/",
            "https://api.kucoin.com",
            "https://openapi-sandbox.kucoin.com/",
            "https://openapi-sandbox.kucoin.com",
        ]
        # validate Kucoin API
        if kucoin_config["api_url"] not in valid_urls:
            raise ValueError("Kucoin API URL is invalid")
        # BUG FIX: was "api_url = ..." which assigned a discarded local and
        # left app.api_url unset.
        app.api_url = kucoin_config["api_url"]  # noqa: F841
        app.base_currency = "BTC"
        app.quote_currency = "GBP"
    else:
        # Incomplete credentials: fall back to an empty (test-mode) config.
        kucoin_config = {}

    config = merge_config_and_args(kucoin_config, args)
    default_config_parse(app, config)

    if "base_currency" in config and config["base_currency"] is not None:
        if not is_currency_valid(config["base_currency"]):
            raise TypeError("Base currency is invalid.")
        app.base_currency = config["base_currency"]
    if "quote_currency" in config and config["quote_currency"] is not None:
        if not is_currency_valid(config["quote_currency"]):
            raise TypeError("Quote currency is invalid.")
        app.quote_currency = config["quote_currency"]
    if "market" in config and config["market"] is not None:
        app.market, app.base_currency, app.quote_currency = parse_market(config["market"])
    if app.base_currency != "" and app.quote_currency != "":
        # Kucoin markets join base and quote with a dash.
        app.market = app.base_currency + "-" + app.quote_currency  # noqa: F841
    if "granularity" in config and config["granularity"] is not None:
        # NOTE(review): non-numeric granularity strings (e.g. "1h") are
        # silently ignored here, unlike the Binance parser - confirm intent.
        if isinstance(config["granularity"], str) and config["granularity"].isnumeric() is True:
            app.granularity = Granularity.convert_to_enum(int(config["granularity"]))
        elif isinstance(config["granularity"], int):
            app.granularity = Granularity.convert_to_enum(config["granularity"])  # noqa: F841
168,161 | import math
from typing import Union
def truncate(f: Union[int, float], n: Union[int, float]) -> str:
    """
    Render ``f`` truncated (floored) to ``n`` decimal places as a string.

    Non-numeric arguments yield the fallback string "0.0". Values below
    0.0001 requested at five or more decimals are instead rendered with
    standard rounding at five places.
    """
    if not isinstance(f, (int, float)) or not isinstance(n, (int, float)):
        return "0.0"
    if f < 0.0001 and n >= 5:
        return f"{f:.5f}"
    scale = 10 ** n
    # floor() drops the digits beyond the requested precision before
    # formatting, so the result never rounds up.
    return f"{math.floor(f * scale) / scale:.{n}f}"
The provided code snippet includes necessary dependencies for implementing the `compare` function. Write a Python function `def compare(val1, val2, label="", precision=2)` to solve the following problem:
Compare two values and return a formatted string showing whether the first is greater than, less than, or equal to the second, with both values truncated to the given precision and optionally prefixed with a label.
Here is the function:
def compare(val1, val2, label="", precision=2):
    """
    Build a human-readable comparison of two values.

    Returns "<val1> <op> <val2>" where <op> is ">", "<" or "=", with both
    values truncated to ``precision`` decimal places; when ``label`` is
    non-empty the result is prefixed with "<label>: ".
    """
    if val1 > val2:
        symbol = ">"
    elif val1 < val2:
        symbol = "<"
    else:
        symbol = "="
    message = f"{truncate(val1, precision)} {symbol} {truncate(val2, precision)}"
    if label == "":
        return message
    return f"{label}: {message}"
168,162 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.evaluation import evaluate
from disentanglement_lib.evaluation.metrics import utils
from disentanglement_lib.methods.unsupervised import train
from disentanglement_lib.methods.unsupervised import vae
from disentanglement_lib.postprocessing import postprocess
from disentanglement_lib.utils import aggregate_results
import tensorflow.compat.v1 as tf
import gin.tf
The provided code snippet includes necessary dependencies for implementing the `compute_custom_metric` function. Write a Python function `def compute_custom_metric(ground_truth_data, representation_function, random_state, num_train=gin.REQUIRED, batch_size=16)` to solve the following problem:
Example of a custom (dummy) metric. Preimplemented metrics can be found in disentanglement_lib.evaluation.metrics. Args: ground_truth_data: GroundTruthData to be sampled from. representation_function: Function that takes observations as input and outputs a dim_representation sized representation for each observation. random_state: Numpy random state used for randomness. num_train: Number of points used for training. batch_size: Batch size for sampling. Returns: Dict with disentanglement score.
Here is the function:
def compute_custom_metric(ground_truth_data,
                          representation_function,
                          random_state,
                          num_train=gin.REQUIRED,
                          batch_size=16):
    """Example of a custom (dummy) metric.

    Preimplemented metrics can be found in disentanglement_lib.evaluation.metrics.

    Args:
      ground_truth_data: GroundTruthData to be sampled from.
      representation_function: Function that takes observations as input and
        outputs a dim_representation sized representation for each observation.
      random_state: Numpy random state used for randomness.
      num_train: Number of points used for training.
      batch_size: Batch size for sampling.

    Returns:
      Dict with disentanglement score.
    """
    # Sample representations and ground-truth factors for num_train points.
    # A real metric would score these; this dummy discards them and reports
    # a constant score of 1.
    utils.generate_batch_factor_code(
        ground_truth_data, representation_function, num_train, random_state,
        batch_size)
    return {"custom_metric": 1.}
168,163 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.data.ground_truth import ground_truth_data
from disentanglement_lib.data.ground_truth import util
import numpy as np
import PIL
from six.moves import range
from six.moves import zip
import tensorflow.compat.v1 as tf
def _load_chunks(path_template, chunk_names):
    """Loads several chunks of the small norb data set into lists."""
    images = []
    features = []
    for chunk in chunk_names:
        # Each chunk has three companion files: image data, category labels,
        # and additional instance info.
        raw_images = _read_binary_matrix(path_template.format(chunk, "dat"))
        images.append(_resize_images(raw_images[:, 0]))
        categories = _read_binary_matrix(path_template.format(chunk, "cat"))
        info = _read_binary_matrix(path_template.format(chunk, "info"))
        features.append(np.column_stack((categories, info)))
    return images, features
The provided code snippet includes necessary dependencies for implementing the `_load_small_norb_chunks` function. Write a Python function `def _load_small_norb_chunks(path_template, chunk_names)` to solve the following problem:
Loads several chunks of the small norb data set for final use.
Here is the function:
def _load_small_norb_chunks(path_template, chunk_names):
    """Loads several chunks of the small norb data set for final use."""
    image_chunks, feature_chunks = _load_chunks(path_template, chunk_names)
    all_features = np.concatenate(feature_chunks, axis=0)
    # Azimuth values are stored as even numbers (0, 2, 4, ...); halve them
    # to obtain consecutive factor indices.
    all_features[:, 3] = all_features[:, 3] / 2
    all_images = np.concatenate(image_chunks, axis=0)
    return all_images, all_features
168,164 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.data.ground_truth import ground_truth_data
from disentanglement_lib.data.ground_truth import util
import numpy as np
import PIL
import scipy.io as sio
from six.moves import range
from sklearn.utils import extmath
from tensorflow.compat.v1 import gfile
# Location of the Cars3D dataset on disk: "<DISENTANGLEMENT_LIB_DATA>/cars",
# defaulting to "./cars" when the environment variable is unset.
CARS3D_PATH = os.path.join(
    os.environ.get("DISENTANGLEMENT_LIB_DATA", "."), "cars")
The provided code snippet includes necessary dependencies for implementing the `_load_mesh` function. Write a Python function `def _load_mesh(filename)` to solve the following problem:
Parses a single source file and rescales contained images.
Here is the function:
def _load_mesh(filename):
    """Parses a single source file and rescales contained images.

    Loads the Cars3D .mat file, reorders the axes so images are indexed
    first, downscales every image to 64x64, and returns a float array of
    shape (num_images, 64, 64, 3) with values in [0, 1].
    """
    with gfile.Open(os.path.join(CARS3D_PATH, filename), "rb") as f:
        mesh = np.einsum("abcde->deabc", sio.loadmat(f)["im"])
    flattened_mesh = mesh.reshape((-1,) + mesh.shape[2:])
    rescaled_mesh = np.zeros((flattened_mesh.shape[0], 64, 64, 3))
    for i in range(flattened_mesh.shape[0]):
        pic = PIL.Image.fromarray(flattened_mesh[i, :, :, :])
        # Pillow's thumbnail() takes a 2-tuple (width, height); the previous
        # 3-tuple (64, 64, 3) relied on lax argument handling in old releases.
        # NOTE(review): ANTIALIAS is removed in Pillow >= 10 (use
        # Image.LANCZOS) - confirm the pinned Pillow version.
        pic.thumbnail((64, 64), PIL.Image.ANTIALIAS)
        rescaled_mesh[i, :, :, :] = np.array(pic)
    return rescaled_mesh * 1. / 255
168,165 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from disentanglement_lib.data.ground_truth import named_data
from disentanglement_lib.evaluation.metrics import beta_vae
from disentanglement_lib.evaluation.metrics import dci
from disentanglement_lib.evaluation.metrics import downstream_task
from disentanglement_lib.evaluation.metrics import factor_vae
from disentanglement_lib.evaluation.metrics import mig
from disentanglement_lib.evaluation.metrics import modularity_explicitness
from disentanglement_lib.evaluation.metrics import sap_score
from disentanglement_lib.evaluation.metrics import unsupervised_metrics
from disentanglement_lib.methods.semi_supervised import semi_supervised_utils
from disentanglement_lib.utils import results
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
import gin.tf
def validate(model_dir,
             output_dir,
             overwrite=False,
             validation_fn=gin.REQUIRED,
             random_seed=gin.REQUIRED,
             num_labelled_samples=gin.REQUIRED,
             name=""):
  """Loads a representation TFHub module and computes disentanglement metrics.

  Args:
    model_dir: String with path to directory where the representation function
      is saved.
    output_dir: String with the path where the results should be saved.
    overwrite: Boolean indicating whether to overwrite output directory.
    validation_fn: Function used to validate the representation (see metrics/
      for examples).
    random_seed: Integer with random seed used for training.
    num_labelled_samples: How many labelled samples are available.
    name: Optional string with name of the metric (can be used to name metrics).

  Raises:
    ValueError: If `output_dir` already exists and `overwrite` is False.
  """
  # We do not use the variable 'name'. Instead, it can be used to name scores
  # as it will be part of the saved gin config.
  del name
  # Delete the output directory if it already exists.
  if tf.gfile.IsDirectory(output_dir):
    if overwrite:
      tf.gfile.DeleteRecursively(output_dir)
    else:
      raise ValueError("Directory already exists and overwrite is False.")
  # Set up time to keep track of elapsed time in results.
  experiment_timer = time.time()
  # Automatically set the proper data set if necessary. We replace the active
  # gin config as this will lead to a valid gin config file where the data set
  # is present.
  if gin.query_parameter("dataset.name") == "auto":
    # Obtain the dataset name from the gin config of the previous step.
    gin_config_file = os.path.join(model_dir, "results", "gin",
                                   "postprocess.gin")
    gin_dict = results.gin_dict(gin_config_file)
    with gin.unlock_config():
      # Strip the quotes the gin config file wraps around string values.
      gin.bind_parameter("dataset.name", gin_dict["dataset.name"].replace(
          "'", ""))
  dataset = named_data.get_named_ground_truth_data()
  # Draw the labelled (observation, label) pairs used for validation.
  observations, labels, _ = semi_supervised_utils.sample_supervised_data(
      random_seed, dataset, num_labelled_samples)
  # Path to TFHub module of previously trained representation.
  module_path = os.path.join(model_dir, "tfhub")
  with hub.eval_function_for_module(module_path) as f:
    def _representation_function(x):
      """Computes representation vector for input images."""
      output = f(dict(images=x), signature="representation", as_dict=True)
      return np.array(output["default"])
    # Computes scores of the representation based on the evaluation_fn.
    results_dict = validation_fn(observations, np.transpose(labels),
                                 _representation_function)
  # Save the results (and all previous results in the pipeline) on disk.
  original_results_dir = os.path.join(model_dir, "results")
  results_dir = os.path.join(output_dir, "results")
  results_dict["elapsed_time"] = time.time() - experiment_timer
  results.update_result_directory(results_dir, "validation", results_dict,
                                  original_results_dir)
The provided code snippet includes necessary dependencies for implementing the `validate_with_gin` function. Write a Python function `def validate_with_gin(model_dir, output_dir, overwrite=False, gin_config_files=None, gin_bindings=None)` to solve the following problem:
Validate a representation based on the provided gin configuration. This function will set the provided gin bindings, call the evaluate() function and clear the gin config. Please see the evaluate() for required gin bindings. Args: model_dir: String with path to directory where the representation is saved. output_dir: String with the path where the evaluation should be saved. overwrite: Boolean indicating whether to overwrite output directory. gin_config_files: List of gin config files to load. gin_bindings: List of gin bindings to use.
Here is the function:
def validate_with_gin(model_dir,
                      output_dir,
                      overwrite=False,
                      gin_config_files=None,
                      gin_bindings=None):
  """Validate a representation based on the provided gin configuration.

  This function will set the provided gin bindings, call the evaluate()
  function and clear the gin config. Please see the evaluate() for required
  gin bindings.

  Args:
    model_dir: String with path to directory where the representation is saved.
    output_dir: String with the path where the evaluation should be saved.
    overwrite: Boolean indicating whether to overwrite output directory.
    gin_config_files: List of gin config files to load.
    gin_bindings: List of gin bindings to use.
  """
  if gin_config_files is None:
    gin_config_files = []
  if gin_bindings is None:
    gin_bindings = []
  gin.parse_config_files_and_bindings(gin_config_files, gin_bindings)
  try:
    validate(model_dir, output_dir, overwrite)
  finally:
    # Always clear the global gin config, even when validation raises, so a
    # failed run cannot leak bindings into subsequent calls in this process.
    gin.clear_config()
168,166 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from disentanglement_lib.data.ground_truth import named_data
from disentanglement_lib.methods.unsupervised import gaussian_encoder_model
from disentanglement_lib.methods.weak import weak_vae
from disentanglement_lib.utils import results
from disentanglement_lib.visualize import visualize_util
import numpy as np
import tensorflow as tf
import gin.tf.external_configurables
import gin.tf
from tensorflow_estimator.python.estimator.tpu import tpu_config
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimator
def train(model_dir,
          overwrite=False,
          model=gin.REQUIRED,
          training_steps=gin.REQUIRED,
          random_seed=gin.REQUIRED,
          batch_size=gin.REQUIRED,
          name=""):
  """Trains the estimator and exports the snapshot and the gin config.

  The use of this function requires the gin binding 'dataset.name' to be
  specified as that determines the data set used for training.

  Args:
    model_dir: String with path to directory where model output should be saved.
    overwrite: Boolean indicating whether to overwrite output directory.
    model: GaussianEncoderModel that should be trained and exported.
    training_steps: Integer with number of training steps.
    random_seed: Integer with random seed used for training.
    batch_size: Integer with the batch size.
    name: Optional string with name of the model (can be used to name models).

  Raises:
    ValueError: If `model_dir` already exists and `overwrite` is False.
  """
  # We do not use the variable 'name'. Instead, it can be used to name results
  # as it will be part of the saved gin config.
  del name
  # Delete the output directory if necessary.
  if tf.compat.v1.gfile.IsDirectory(model_dir):
    if overwrite:
      tf.compat.v1.gfile.DeleteRecursively(model_dir)
    else:
      raise ValueError("Directory already exists and overwrite is False.")
  # Create a numpy random state. We will sample the random seeds for training
  # and evaluation from this.
  random_state = np.random.RandomState(random_seed)
  # Obtain the dataset.
  dataset = named_data.get_named_ground_truth_data()
  # We create a TPUEstimator based on the provided model. This is primarily so
  # that we could switch to TPU training in the future. For now, we train
  # locally on GPUs.
  run_config = tpu_config.RunConfig(
      tf_random_seed=random_seed,
      keep_checkpoint_max=1,
      tpu_config=tpu_config.TPUConfig(iterations_per_loop=500))
  tpu_estimator = TPUEstimator(
      use_tpu=False,
      model_fn=model.model_fn,
      model_dir=model_dir,
      train_batch_size=batch_size,
      eval_batch_size=batch_size,
      config=run_config)
  # Set up time to keep track of elapsed time in results.
  experiment_timer = time.time()
  # Do the actual training.
  tpu_estimator.train(
      input_fn=_make_input_fn(dataset, random_state.randint(2**32)),
      steps=training_steps)
  # Save model as a TFHub module.
  output_shape = named_data.get_named_ground_truth_data().observation_shape
  module_export_path = os.path.join(model_dir, "tfhub")
  gaussian_encoder_model.export_as_tf_hub(model, output_shape,
                                          tpu_estimator.latest_checkpoint(),
                                          module_export_path)
  # Save the results. The result dir will contain all the results and config
  # files that we copied along, as we progress in the pipeline. The idea is that
  # these files will be available for analysis at the end.
  results_dict = tpu_estimator.evaluate(
      input_fn=_make_input_fn(
          dataset, random_state.randint(2**32), num_batches=1000
      ))
  results_dir = os.path.join(model_dir, "results")
  results_dict["elapsed_time"] = time.time() - experiment_timer
  results.update_result_directory(results_dir, "train", results_dict)
  # Additionally store a visualization of the weakly-supervised data set next
  # to the model output.
  visualize_dir = os.path.join(model_dir, "visualizations")
  visualize_weakly_supervised_dataset(
      dataset, os.path.join(visualize_dir, "weak"), num_frames=1)
The provided code snippet includes necessary dependencies for implementing the `train_with_gin` function. Write a Python function `def train_with_gin(model_dir, overwrite=False, gin_config_files=None, gin_bindings=None)` to solve the following problem:
Trains a model based on the provided gin configuration. This function will set the provided gin bindings, call the train() function and clear the gin config. Please see the train() for required gin bindings. Args: model_dir: String with path to directory where model output should be saved. overwrite: Boolean indicating whether to overwrite output directory. gin_config_files: List of gin config files to load. gin_bindings: List of gin bindings to use.
Here is the function:
def train_with_gin(model_dir,
                   overwrite=False,
                   gin_config_files=None,
                   gin_bindings=None):
  """Trains a model based on the provided gin configuration.

  This function will set the provided gin bindings, call the train() function
  and clear the gin config. Please see the train() for required gin bindings.

  Args:
    model_dir: String with path to directory where model output should be saved.
    overwrite: Boolean indicating whether to overwrite output directory.
    gin_config_files: List of gin config files to load.
    gin_bindings: List of gin bindings to use.
  """
  if gin_config_files is None:
    gin_config_files = []
  if gin_bindings is None:
    gin_bindings = []
  gin.parse_config_files_and_bindings(gin_config_files, gin_bindings)
  try:
    train(model_dir, overwrite)
  finally:
    # Always clear the global gin config, even when training raises, so a
    # failed run cannot leak bindings into subsequent calls in this process.
    gin.clear_config()
168,167 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.methods.shared import losses
from disentanglement_lib.methods.shared import optimizers
from disentanglement_lib.methods.unsupervised import vae
from six.moves import zip
import tensorflow.compat.v1 as tf
import gin.tf
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimatorSpec
The provided code snippet includes necessary dependencies for implementing the `make_weak_loss` function. Write a Python function `def make_weak_loss(z1, z2, labels, loss_fn=gin.REQUIRED)` to solve the following problem:
Wrapper that creates weakly-supervised losses.
Here is the function:
def make_weak_loss(z1, z2, labels, loss_fn=gin.REQUIRED):
  """Wrapper that creates weakly-supervised losses.

  Args:
    z1: Latent code of the first observation.
    z2: Latent code of the second observation.
    labels: Labels describing which factors are shared between the pair.
    loss_fn: Gin-configured callable implementing the actual loss.

  Returns:
    Whatever `loss_fn` returns for the given pair and labels.
  """
  weak_loss = loss_fn(z1, z2, labels)
  return weak_loss
168,168 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.methods.shared import losses
from disentanglement_lib.methods.shared import optimizers
from disentanglement_lib.methods.unsupervised import vae
from six.moves import zip
import tensorflow.compat.v1 as tf
import gin.tf
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimatorSpec
The provided code snippet includes necessary dependencies for implementing the `aggregate_labels` function. Write a Python function `def aggregate_labels(z_mean, z_logvar, new_mean, new_log_var, labels, kl_per_point)` to solve the following problem:
Use labels to aggregate. Labels contains a one-hot encoding with a single 1 of a factor shared. We enforce which dimension of the latent code learn which factor (dimension 1 learns factor 1) and we enforce that each factor of variation is encoded in a single dimension. Args: z_mean: Mean of the encoder distribution for the original image. z_logvar: Logvar of the encoder distribution for the original image. new_mean: Average mean of the encoder distribution of the pair of images. new_log_var: Average logvar of the encoder distribution of the pair of images. labels: One-hot-encoding with the position of the dimension that should not be shared. kl_per_point: Distance between the two encoder distributions (unused). Returns: Mean and logvariance for the new observation.
Here is the function:
def aggregate_labels(z_mean, z_logvar, new_mean, new_log_var, labels,
                     kl_per_point):
  """Aggregates the encoder distributions of a pair using the labels.

  `labels` is a one-hot encoding marking the single factor that is NOT shared
  between the two observations. Dimensions marked by the label keep their
  original statistics; all other dimensions take the averaged statistics, so
  each factor of variation is forced into one fixed latent dimension.

  Args:
    z_mean: Mean of the encoder distribution for the original image.
    z_logvar: Logvar of the encoder distribution for the original image.
    new_mean: Average mean of the encoder distribution of the pair of images.
    new_log_var: Average logvar of the encoder distribution of the pair of
      images.
    labels: One-hot-encoding with the position of the dimension that should
      not be shared.
    kl_per_point: Distance between the two encoder distributions (unused).

  Returns:
    Mean and logvariance for the new observation.
  """
  del kl_per_point  # Aggregation is driven purely by the labels.
  # True exactly at the positions where the one-hot label has its maximum,
  # i.e. the dimension that must keep its own (non-averaged) statistics.
  row_max = tf.expand_dims(tf.reduce_max(labels, axis=1), 1)
  keep_original = tf.math.equal(labels, row_max)
  z_mean_averaged = tf.where(keep_original, z_mean, new_mean)
  z_logvar_averaged = tf.where(keep_original, z_logvar, new_log_var)
  return z_mean_averaged, z_logvar_averaged
168,169 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.methods.shared import losses
from disentanglement_lib.methods.shared import optimizers
from disentanglement_lib.methods.unsupervised import vae
from six.moves import zip
import tensorflow.compat.v1 as tf
import gin.tf
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimatorSpec
def discretize_in_bins(x):
  """Discretize a vector in two bins."""
  # Bin edges span the full observed value range of x.
  value_range = [tf.reduce_min(x), tf.reduce_max(x)]
  return tf.histogram_fixed_width_bins(x, value_range, nbins=2)
The provided code snippet includes necessary dependencies for implementing the `aggregate_argmax` function. Write a Python function `def aggregate_argmax(z_mean, z_logvar, new_mean, new_log_var, labels, kl_per_point)` to solve the following problem:
Argmax aggregation with adaptive k. The bottom k dimensions in terms of distance are not averaged. K is estimated adaptively by binning the distance into two bins of equal width. Args: z_mean: Mean of the encoder distribution for the original image. z_logvar: Logvar of the encoder distribution for the original image. new_mean: Average mean of the encoder distribution of the pair of images. new_log_var: Average logvar of the encoder distribution of the pair of images. labels: One-hot-encoding with the position of the dimension that should not be shared. kl_per_point: Distance between the two encoder distributions. Returns: Mean and logvariance for the new observation.
Here is the function:
def aggregate_argmax(z_mean, z_logvar, new_mean, new_log_var, labels,
                     kl_per_point):
  """Argmax aggregation with adaptive k.

  The bottom k dimensions in terms of distance are not averaged. K is
  estimated adaptively by binning the distance into two bins of equal width.

  Args:
    z_mean: Mean of the encoder distribution for the original image.
    z_logvar: Logvar of the encoder distribution for the original image.
    new_mean: Average mean of the encoder distribution of the pair of images.
    new_log_var: Average logvar of the encoder distribution of the pair of
      images.
    labels: One-hot-encoding with the position of the dimension that should
      not be shared.
    kl_per_point: Distance between the two encoder distributions.

  Returns:
    Mean and logvariance for the new observation.
  """
  del labels  # Unused: k is estimated from the KL distances instead.
  # Bin each per-dimension distance into {0, 1}; bin 1 holds the dimensions
  # with the larger distances, which keep their original statistics.
  bin_index = tf.map_fn(discretize_in_bins, kl_per_point, tf.int32)
  keep_original = tf.equal(bin_index, 1)
  z_mean_averaged = tf.where(keep_original, z_mean, new_mean)
  z_logvar_averaged = tf.where(keep_original, z_logvar, new_log_var)
  return z_mean_averaged, z_logvar_averaged
168,170 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.methods.shared import losses
from disentanglement_lib.methods.shared import optimizers
from disentanglement_lib.methods.unsupervised import vae
from six.moves import zip
import tensorflow.compat.v1 as tf
import gin.tf
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimatorSpec
def compute_kl(z_1, z_2, logvar_1, logvar_2):
  """Computes the per-dimension KL divergence between two diagonal Gaussians.

  Args:
    z_1: Mean of the first Gaussian.
    z_2: Mean of the second Gaussian.
    logvar_1: Log-variance of the first Gaussian.
    logvar_2: Log-variance of the second Gaussian.

  Returns:
    Tensor with the element-wise KL divergence KL(N(z_1, var_1) || N(z_2, var_2)).
  """
  variance_1 = tf.exp(logvar_1)
  variance_2 = tf.exp(logvar_2)
  variance_ratio = variance_1 / variance_2
  mean_term = tf.square(z_2 - z_1) / variance_2
  return variance_ratio + mean_term - 1 + logvar_2 - logvar_1
168,171 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from disentanglement_lib.methods.shared import losses
from disentanglement_lib.methods.shared import optimizers
from disentanglement_lib.methods.unsupervised import vae
from six.moves import zip
import tensorflow.compat.v1 as tf
import gin.tf
from tensorflow_estimator.python.estimator.tpu.tpu_estimator import TPUEstimatorSpec
The provided code snippet includes necessary dependencies for implementing the `make_metric_fn` function. Write a Python function `def make_metric_fn(*names)` to solve the following problem:
Utility function to report tf.metrics in model functions.
Here is the function:
def make_metric_fn(*names):
  """Utility function to report tf.metrics in model functions."""
  def metric_fn(*args):
    # Pair each provided name with the streaming mean of its tensor.
    return dict(zip(names, (tf.metrics.mean(vec) for vec in args)))
  return metric_fn
168,172 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_probability as tfp
import gin.tf
The provided code snippet includes necessary dependencies for implementing the `bernoulli_loss` function. Write a Python function `def bernoulli_loss(true_images, reconstructed_images, activation, subtract_true_image_entropy=False)` to solve the following problem:
Computes the Bernoulli loss.
Here is the function:
def bernoulli_loss(true_images,
                   reconstructed_images,
                   activation,
                   subtract_true_image_entropy=False):
  """Computes the Bernoulli loss.

  Args:
    true_images: Tensor with ground-truth images; flattened internally to
      shape (batch_size, num_pixels).
    reconstructed_images: Tensor with the decoder output, interpreted
      according to `activation`.
    activation: String, either "logits" (raw logits) or "tanh" (pre-tanh
      activations mapped into (0, 1)).
    subtract_true_image_entropy: If True, subtracts the entropy of the true
      images from the loss.

  Returns:
    Tensor of shape (batch_size,) with the per-sample loss.

  Raises:
    NotImplementedError: If `activation` is not a supported value.
  """
  flattened_dim = np.prod(true_images.get_shape().as_list()[1:])
  reconstructed_images = tf.reshape(
      reconstructed_images, shape=[-1, flattened_dim])
  true_images = tf.reshape(true_images, shape=[-1, flattened_dim])
  # Because true images are not binary, the lower bound in the xent is not zero:
  # the lower bound in the xent is the entropy of the true images.
  if subtract_true_image_entropy:
    # Clip the probabilities away from {0, 1} so the entropy is finite.
    dist = tfp.distributions.Bernoulli(
        probs=tf.clip_by_value(true_images, 1e-6, 1 - 1e-6))
    loss_lower_bound = tf.reduce_sum(dist.entropy(), axis=1)
  else:
    loss_lower_bound = 0
  if activation == "logits":
    loss = tf.reduce_sum(
        tf.nn.sigmoid_cross_entropy_with_logits(
            logits=reconstructed_images, labels=true_images),
        axis=1)
  elif activation == "tanh":
    # Map tanh outputs from (-1, 1) into (0, 1) and clip away from the
    # endpoints so the logs below are finite.
    reconstructed_images = tf.clip_by_value(
        tf.nn.tanh(reconstructed_images) / 2 + 0.5, 1e-6, 1 - 1e-6)
    loss = -tf.reduce_sum(
        true_images * tf.log(reconstructed_images) +
        (1 - true_images) * tf.log(1 - reconstructed_images),
        axis=1)
  else:
    raise NotImplementedError("Activation not supported.")
  return loss - loss_lower_bound
168,173 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_probability as tfp
import gin.tf
The provided code snippet includes necessary dependencies for implementing the `make_reconstruction_loss` function. Write a Python function `def make_reconstruction_loss(true_images, reconstructed_images, loss_fn=gin.REQUIRED, activation="logits")` to solve the following problem:
Wrapper that creates reconstruction loss.
Here is the function:
def make_reconstruction_loss(true_images,
                             reconstructed_images,
                             loss_fn=gin.REQUIRED,
                             activation="logits"):
  """Wrapper that creates reconstruction loss.

  Args:
    true_images: Tensor with the ground-truth images.
    reconstructed_images: Tensor with the decoder output.
    loss_fn: Gin-configured callable computing the per-sample loss.
    activation: String describing how `reconstructed_images` should be
      interpreted by `loss_fn` (defaults to "logits").

  Returns:
    Tensor with the per-sample reconstruction loss.
  """
  # Scope the loss ops so they are grouped in the graph/summaries.
  with tf.variable_scope("reconstruction_loss"):
    return loss_fn(true_images, reconstructed_images, activation)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.