| from numerize.numerize import numerize
|
| import streamlit as st
|
| import pandas as pd
|
| import json
|
| from classes import Channel, Scenario
|
| import numpy as np
|
| from plotly.subplots import make_subplots
|
| import plotly.graph_objects as go
|
| from classes import class_to_dict
|
| from collections import OrderedDict
|
| import io
|
| import plotly
|
| from pathlib import Path
|
| import pickle
|
| import yaml
|
| from yaml import SafeLoader
|
| from streamlit.components.v1 import html
|
| import smtplib
|
| from scipy.optimize import curve_fit
|
| from sklearn.metrics import r2_score
|
| from classes import class_from_dict
|
| import os
|
| import base64
|
|
|
|
|
# Shared hex color palette used by every Plotly figure in this module.
# Elsewhere in this file index 5 ("#304550") is reused for the "Non Media"
# slice and index -1 ("#B6B6B6") for the non-media stacked bar.
color_palette = [
    "#F3F3F0",
    "#5E7D7E",
    "#2FA1FF",
    "#00EDED",
    "#00EAE4",
    "#304550",
    "#EDEBEB",
    "#7FBEFD",
    "#003059",
    "#A2F3F3",
    "#E1D6E2",
    "#B6B6B6",
]

# Currency symbol prefixed by format_numbers() when include_indicator=True.
CURRENCY_INDICATOR = '€'

# NOTE(review): mid-file import kept in place to avoid reordering side
# effects; conventionally this belongs in the import block at the top.
import streamlit_authenticator as stauth
|
|
|
|
|
def load_authenticator():
    """Build a streamlit-authenticator instance from ``config.yaml``.

    Caches both the parsed config and the authenticator object in
    ``st.session_state`` ("config" / "authenticator") and returns the
    authenticator.
    """
    with open("config.yaml") as config_file:
        config = yaml.load(config_file, Loader=SafeLoader)
    st.session_state["config"] = config

    cookie_cfg = config["cookie"]
    authenticator = stauth.Authenticate(
        credentials=config["credentials"],
        cookie_name=cookie_cfg["name"],
        key=cookie_cfg["key"],
        cookie_expiry_days=cookie_cfg["expiry_days"],
        preauthorized=config["preauthorized"],
    )
    st.session_state["authenticator"] = authenticator
    return authenticator
|
|
|
|
|
|
|
def authentication():
    """Run the streamlit-authenticator login flow against ``config.yaml``.

    Returns:
        Tuple of (authenticator, name, authentication_status, username) as
        produced by ``Authenticate.login``.
    """
    with open("config.yaml") as config_file:
        config = yaml.load(config_file, Loader=SafeLoader)

    cookie_cfg = config["cookie"]
    authenticator = stauth.Authenticate(
        config["credentials"],
        cookie_cfg["name"],
        cookie_cfg["key"],
        cookie_cfg["expiry_days"],
        config["preauthorized"],
    )

    name, authentication_status, username = authenticator.login("Login", "main")
    return authenticator, name, authentication_status, username
|
|
|
|
|
def nav_page(page_name, timeout_secs=3):
    """Navigate to another Streamlit page by simulating a sidebar-link click.

    Injects a small JS snippet (via streamlit.components.v1.html) that scans
    the parent document's <a> tags for an href ending in ``/<page_name>``
    (case-insensitive) and clicks it, retrying every 100 ms until
    ``timeout_secs`` elapses, then alerts on failure.

    Args:
        page_name: Target page slug as it appears in the URL.
        timeout_secs: Seconds to keep retrying before giving up.
    """
    # NOTE: "elasped" is a typo in the JS variable name; harmless since it is
    # used consistently inside the script.
    nav_script = """
        <script type="text/javascript">
            function attempt_nav_page(page_name, start_time, timeout_secs) {
                var links = window.parent.document.getElementsByTagName("a");
                for (var i = 0; i < links.length; i++) {
                    if (links[i].href.toLowerCase().endsWith("/" + page_name.toLowerCase())) {
                        links[i].click();
                        return;
                    }
                }
                var elasped = new Date() - start_time;
                if (elasped < timeout_secs * 1000) {
                    setTimeout(attempt_nav_page, 100, page_name, start_time, timeout_secs);
                } else {
                    alert("Unable to navigate to page '" + page_name + "' after " + timeout_secs + " second(s).");
                }
            }
            window.addEventListener("load", function() {
                attempt_nav_page("%s", new Date(), %d);
            });
        </script>
    """ % (
        page_name,
        timeout_secs,
    )
    html(nav_script)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Resolve asset paths relative to this file so the app works from any CWD.
path = os.path.dirname(__file__)

# Load the logo once at import time and keep it as a base64 string for
# inline HTML embedding. Use a context manager so the handle is closed
# even if the read fails (the original open/close pair leaked on error).
with open(f"{path}/ALDI_2017.png", "rb") as file_:
    contents = file_.read()
data_url = base64.b64encode(contents).decode("utf-8")

DATA_PATH = "./data"
IMAGES_PATH = "./data/images_224_224"
|
|
|
|
|
def load_local_css(file_name):
    """Inject the contents of a local CSS file into the Streamlit page."""
    with open(file_name) as css_file:
        styles = css_file.read()
    st.markdown(f"<style>{styles}</style>", unsafe_allow_html=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# NOTE(review): this duplicates the earlier logo-loading block; kept because
# set_header() reads data_url1. The original f-string referenced `path`
# instead of `path1` — identical values, but fixed here for robustness.
path1 = os.path.dirname(__file__)

# Context manager closes the handle even on read failure (the original
# open/close pair leaked on error).
with open(f"{path1}/ALDI_2017.png", "rb") as file_1:
    contents1 = file_1.read()
data_url1 = base64.b64encode(contents1).decode("utf-8")

DATA_PATH1 = "./data"
IMAGES_PATH1 = "./data/images_224_224"
|
|
|
|
|
def set_header():
    """Render the page header with the inline base64-encoded logo.

    Uses the module-level ``data_url1`` computed at import time.
    """
    header_markup = f"""<div class='main-header'>
    <!-- <h1></h1> -->
    <div >
    <img class='blend-logo' src="data:image;base64,{data_url1}", alt="Logo">
    </div>"""
    return st.markdown(header_markup, unsafe_allow_html=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def s_curve(x, K, b, a, x0):
    """Generalized logistic (S-shaped) response curve.

    Args:
        x: Input spend/activity level (scalar or numpy array).
        K: Saturation ceiling (upper asymptote).
        b: Shape coefficient scaling the exponential term.
        a: Growth rate (steepness).
        x0: Inflection-point offset.

    Returns:
        K / (1 + b * exp(-a * (x - x0))), elementwise for arrays.
    """
    decay = np.exp(-a * (x - x0))
    return K / (1 + b * decay)
|
|
|
|
|
def panel_level(input_df, date_column="Date"):
    """Aggregate all numeric columns across panels, one row per date.

    Args:
        input_df: Panel-level dataframe; ``date_column`` may be a column or
            already part of the index.
        date_column: Name of the date column to group by.

    Returns:
        DataFrame with ``date_column`` restored as a column and every numeric
        column summed per date. Non-numeric columns are dropped.
    """
    indexed = (
        input_df
        if date_column in input_df.index.names
        else input_df.set_index(date_column)
    )
    numeric_only = indexed.select_dtypes(include="number")
    return numeric_only.groupby(indexed.index).sum().reset_index()
|
|
|
|
|
def initialize_data(
    panel=None, target_file=r"Overview_data_test_panel@#revenue.xlsx", updated_rcs=None, metrics=None
):
    """Load MMM workbook data, fit per-channel s-curve response curves, and
    populate st.session_state with the default Scenario and related artifacts.

    Args:
        panel: Panel name to filter on; "Total Market" aggregates all panels
            via panel_level(); None uses the workbook as-is.
        target_file: Excel workbook with sheets "RAW DATA MMM", "SPEND INPUT"
            and "CONTRIBUTION MMM".
        updated_rcs: Optional dict of user-overridden response-curve params,
            keyed "{metrics}#@{panel}#@{channel}".
        metrics: Metric label used only to build the updated_rcs lookup key.

    Side effects:
        Writes many keys into st.session_state (scenario, rcs, channels_list,
        powers, dataframes, per-channel spend strings, saved_scenarios, ...).
    """
    # Read every sheet at once; keys are sheet names.
    excel = pd.read_excel(target_file, sheet_name=None)

    raw_df = excel["RAW DATA MMM"]
    spend_df = excel["SPEND INPUT"]
    contri_df = excel["CONTRIBUTION MMM"]

    # Either restrict to a single panel or aggregate all panels per date.
    if panel is not None and panel != "Total Market":
        raw_df = raw_df[raw_df["Panel"] == panel].drop(columns=["Panel"])
        spend_df = spend_df[spend_df["Panel"] == panel].drop(columns=["Panel"])
        contri_df = contri_df[contri_df["Panel"] == panel].drop(columns=["Panel"])
    elif panel == "Total Market":
        raw_df = panel_level(raw_df, date_column="Date")
        spend_df = panel_level(spend_df, date_column="Week")
        contri_df = panel_level(contri_df, date_column="Date")

    # Columns that are controls/seasonality/etc., not media channels.
    exclude_columns = [
        "Date",
        "Region",
        "Controls_Grammarly_Index_SeasonalAVG",
        "Controls_Quillbot_Index",
        "Daily_Positive_Outliers",
        "External_RemoteClass_Index",
        "Intervals ON 20190520-20190805 | 20200518-20200803 | 20210517-20210802",
        "Intervals ON 20190826-20191209 | 20200824-20201207 | 20210823-20211206",
        "Intervals ON 20201005-20201019",
        "Promotion_PercentOff",
        "Promotion_TimeBased",
        "Seasonality_Indicator_Chirstmas",
        "Seasonality_Indicator_NewYears_Days",
        "Seasonality_Indicator_Thanksgiving",
        "Trend 20200302 / 20200803",
    ]
    raw_df["Date"] = pd.to_datetime(raw_df["Date"])
    contri_df["Date"] = pd.to_datetime(contri_df["Date"])
    input_df = raw_df.sort_values(by="Date")
    output_df = contri_df.sort_values(by="Date")
    # errors="coerce" turns unparseable week labels into NaT rather than raising.
    spend_df["Week"] = pd.to_datetime(
        spend_df["Week"], format="%Y-%m-%d", errors="coerce"
    )
    spend_df.sort_values(by="Week", inplace=True)

    # Media channels = everything not excluded, minus two explicitly dropped
    # channels. NOTE: set() subtraction makes the ordering non-deterministic.
    channel_list = [col for col in input_df.columns if col not in exclude_columns]
    channel_list = list(set(channel_list) - set(["fb_level_achieved_tier_1", "ga_app"]))

    # Per-channel accumulators for fit results and diagnostics.
    response_curves = {}
    mapes = {}
    rmses = {}
    upper_limits = {}
    powers = {}
    r2 = {}
    conv_rates = {}
    output_cols = []
    channels = {}
    sales = None
    dates = input_df.Date.values
    actual_output_dic = {}
    actual_input_dic = {}

    for inp_col in channel_list:
        spends = input_df[inp_col].values
        x = spends.copy()

        # Optimizer upper bound: twice the historical max activity.
        upper_limits[inp_col] = 2 * x.max()

        # Matching contribution column is assumed to start with the channel
        # name; first match wins.
        out_col = [_col for _col in output_df.columns if _col.startswith(inp_col)][0]
        y = output_df[out_col].values.copy()
        actual_output_dic[inp_col] = y.copy()
        actual_input_dic[inp_col] = x.copy()

        output_cols.append(out_col)

        # Rescale x so curve_fit works on numbers of manageable magnitude
        # (power-of-10 scaling, keeping ~3 significant digits of range).
        power = np.ceil(np.log(x.max()) / np.log(10)) - 3
        if power >= 0:
            x = x / 10**power

        x = x.astype("float64")
        y = y.astype("float64")

        # Parameter bounds for (K, b, a, x0); K capped at 3x observed max.
        bounds = ((0, 0, 0, 0), (3 * y.max(), 1000, 1, x.max()))

        params, _ = curve_fit(
            s_curve,
            x,
            y,
            p0=(2 * y.max(), 0.01, 1e-5, x.max()),
            bounds=bounds,
            maxfev=int(1e5),
        )
        # clip(min=1) avoids division by zero for weeks with ~no contribution.
        mape = (100 * abs(1 - s_curve(x, *params) / y.clip(min=1))).mean()
        rmse = np.sqrt(((y - s_curve(x, *params)) ** 2).mean())
        r2_ = r2_score(y, s_curve(x, *params))

        response_curves[inp_col] = {
            "K": params[0],
            "b": params[1],
            "a": params[2],
            "x0": params[3],
        }

        # User-edited response curves (if any) override the fitted one.
        updated_rcs_key = f"{metrics}#@{panel}#@{inp_col}"
        if updated_rcs is not None and updated_rcs_key in list(updated_rcs.keys()):
            response_curves[inp_col] = updated_rcs[updated_rcs_key]

        mapes[inp_col] = mape
        rmses[inp_col] = rmse
        r2[inp_col] = r2_
        powers[inp_col] = power

        # Matching spend column: match on the channel name minus its last
        # "_suffix" (e.g. "_imp" / "_clicks"); first match wins.
        spend_col = [
            _col
            for _col in spend_df.columns
            if _col.startswith(inp_col.rsplit("_", 1)[0])
        ][0]

        # Conversion rate = mean of weekly (spend / activity); activity
        # clipped at 1 to avoid division by zero.
        conv = (
            spend_df.set_index("Week")[spend_col]
            / input_df.set_index("Date")[inp_col].clip(lower=1)
        ).reset_index()
        conv.rename(columns={"index": "Week"}, inplace=True)
        conv["year"] = conv.Week.dt.year
        # First value of the column means = the ratio column's mean (the
        # "year" column mean is discarded).
        conv_rates[inp_col] = list(conv.drop("Week", axis=1).mean().to_dict().values())[
            0
        ]

        channel = Channel(
            name=inp_col,
            dates=dates,
            spends=spends,
            conversion_rate=conv_rates[inp_col],
            response_curve_type="s-curve",
            response_curve_params={
                "K": params[0],
                "b": params[1],
                "a": params[2],
                "x0": params[3],
            },
            bounds=np.array([-10, 10]),
        )
        channels[inp_col] = channel
        if sales is None:
            sales = channel.actual_sales
        else:
            sales += channel.actual_sales

    # Non-media contribution = everything not attributed to a channel.
    other_contributions = (
        output_df.drop([*output_cols], axis=1).sum(axis=1, numeric_only=True).values
    )
    # Correction aligns modeled totals with the workbook's actual totals.
    correction = output_df.drop("Date", axis=1).sum(axis=1).values - (
        sales + other_contributions
    )
    scenario = Scenario(
        name="default",
        channels=channels,
        constant=other_contributions,
        correction=correction,
    )

    # Publish everything the UI pages read from session state.
    st.session_state["initialized"] = True
    st.session_state["actual_df"] = input_df
    st.session_state["raw_df"] = raw_df
    st.session_state["contri_df"] = output_df
    default_scenario_dict = class_to_dict(scenario)
    st.session_state["default_scenario_dict"] = default_scenario_dict
    st.session_state["scenario"] = scenario
    st.session_state["channels_list"] = channel_list
    st.session_state["optimization_channels"] = {
        channel_name: False for channel_name in channel_list
    }
    st.session_state["rcs"] = response_curves

    st.session_state["powers"] = powers
    st.session_state["actual_contribution_df"] = pd.DataFrame(actual_output_dic)
    st.session_state["actual_input_df"] = pd.DataFrame(actual_input_dic)

    # Per-channel formatted spend strings keyed by channel name.
    for channel in channels.values():
        st.session_state[channel.name] = numerize(
            channel.actual_total_spends * channel.conversion_rate, 1
        )

    st.session_state["xlsx_buffer"] = io.BytesIO()

    # SECURITY NOTE(review): pickle.load on a local file — only safe if the
    # file is trusted; do not load user-supplied pickles.
    if Path("../saved_scenarios.pkl").exists():
        with open("../saved_scenarios.pkl", "rb") as f:
            st.session_state["saved_scenarios"] = pickle.load(f)
    else:
        st.session_state["saved_scenarios"] = OrderedDict()

    # NOTE(review): duplicate assignment — "optimization_channels" was
    # already set above with the same value.
    st.session_state["optimization_channels"] = {
        channel_name: False for channel_name in channel_list
    }
    st.session_state["disable_download_button"] = True
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def create_channel_summary(scenario):
    """Return a hard-coded demo summary table of channel spends/revenue/ROI.

    Args:
        scenario: Accepted for interface compatibility but unused — the table
            is static demo data.

    Returns:
        DataFrame with columns Channel, Spends, Revenue (display strings)
        and ROI (formatted to one decimal, computed from the numeric data).
    """
    data = {
        "Channel": [
            "Paid Search",
            "Ga will cid baixo risco",
            "Digital tactic others",
            "Fb la tier 1",
            "Fb la tier 2",
            "Paid social others",
            "Programmatic",
            "Kwai",
            "Indicacao",
            "Infleux",
            "Influencer",
        ],
        "Spends": [
            "$ 11.3K",
            "$ 155.2K",
            "$ 50.7K",
            "$ 125.4K",
            "$ 125.2K",
            "$ 105K",
            "$ 3.3M",
            "$ 47.5K",
            "$ 55.9K",
            "$ 632.3K",
            "$ 48.3K",
        ],
        "Revenue": [
            "558.0K",
            "3.5M",
            "5.2M",
            "3.1M",
            "3.1M",
            "2.1M",
            "20.8M",
            "1.6M",
            "728.4K",
            "22.9M",
            "4.8M",
        ],
    }

    df = pd.DataFrame(data)

    def _parse_money(series):
        # Turn "$ 11.3K" style strings into integers: strip "$", expand the
        # K/M suffix into an arithmetic expression, then evaluate.
        # Raw string for the regex avoids the invalid "\$" escape warning.
        return (
            series
            .replace({r"\$": "", "K": "*1e3", "M": "*1e6"}, regex=True)
            .map(pd.eval)
            .astype(int)
        )

    df["Spends"] = _parse_money(df["Spends"])
    df["Revenue"] = _parse_money(df["Revenue"])

    df["ROI"] = (df["Revenue"] - df["Spends"]) / df["Spends"]

    # Replace numeric columns with hard-coded display strings. NOTE: the
    # Revenue display strings intentionally differ from the numbers used to
    # compute ROI above — preserved from the original demo data.
    df["Spends"] = [
        "$ 11.3K",
        "$ 155.2K",
        "$ 50.7K",
        "$ 125.4K",
        "$ 125.2K",
        "$ 105K",
        "$ 3.3M",
        "$ 47.5K",
        "$ 55.9K",
        "$ 632.3K",
        "$ 48.3K",
    ]
    df["Revenue"] = [
        "$ 536.3K",
        "$ 3.4M",
        "$ 5M",
        "$ 3M",
        "$ 3M",
        "$ 2M",
        "$ 20M",
        "$ 1.5M",
        "$ 7.1M",
        "$ 22M",
        "$ 4.6M",
    ]
    df["ROI"] = df["ROI"].apply(lambda roi: f"{roi:.1f}")

    return df
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def create_contribution_pie():
    """Build a two-pie figure of hard-coded spend and revenue share by channel."""
    # Local palette shadows the module-level one on purpose (kept as-is).
    color_palette = [
        "#F3F3F0",
        "#5E7D7E",
        "#2FA1FF",
        "#00EDED",
        "#00EAE4",
        "#304550",
        "#EDEBEB",
        "#7FBEFD",
        "#003059",
        "#A2F3F3",
        "#E1D6E2",
        "#B6B6B6",
    ]
    channels_list = [
        "Paid Search",
        "Ga will cid baixo risco",
        "Digital tactic others",
        "Fb la tier 1",
        "Fb la tier 2",
        "Paid social others",
        "Programmatic",
        "Kwai",
        "Indicacao",
        "Infleux",
        "Influencer",
        "Non Media",
    ]

    # One palette color per channel (cycled), with "Non Media" pinned to
    # palette index 5.
    colors_map = {
        name: color_palette[idx % len(color_palette)]
        for idx, name in enumerate(channels_list)
    }
    colors_map["Non Media"] = color_palette[5]

    # Hard-coded demo contribution percentages.
    spends_values = [0.5, 3.36, 1.1, 2.7, 2.7, 2.27, 70.6, 1, 1, 13.7, 1, 0]
    revenue_values = [1, 4, 5, 3, 3, 2, 50.8, 1.5, 0.7, 13, 0, 16]

    total_contribution_fig = make_subplots(
        rows=1,
        cols=2,
        subplot_titles=["Spends", "Revenue"],
        specs=[[{"type": "pie"}, {"type": "pie"}]],
    )

    # Spends pie on the left, revenue pie on the right.
    for pie_col, pie_values in ((1, spends_values), (2, revenue_values)):
        total_contribution_fig.add_trace(
            go.Pie(
                labels=list(channels_list),
                values=pie_values,
                marker=dict(colors=[colors_map[name] for name in channels_list]),
                hole=0.3,
            ),
            row=1,
            col=pie_col,
        )

    total_contribution_fig.update_traces(
        textposition="inside", texttemplate="%{percent:.1%}"
    )
    total_contribution_fig.update_layout(
        uniformtext_minsize=12, title="Channel contribution", uniformtext_mode="hide"
    )
    return total_contribution_fig
|
|
|
|
|
def create_contribuion_stacked_plot(scenario):
    """Build stacked weekly bars of channel spends (left) and revenue (right).

    Reads the raw dataframe and channel list from st.session_state. The
    non-media contribution (scenario.constant + scenario.correction) is
    appended as its own bar in the revenue subplot.
    """
    weekly_contribution_fig = make_subplots(
        rows=1,
        cols=2,
        subplot_titles=["Spends", "Revenue"],
        specs=[[{"type": "bar"}, {"type": "bar"}]],
    )
    dates = st.session_state["raw_df"].sort_values(by="Date").Date

    spend_traces, sales_traces = [], []
    for idx, ch_name in enumerate(st.session_state["channels_list"]):
        ch = scenario.channels[ch_name]
        ch_color = color_palette[idx % len(color_palette)]

        spend_traces.append(
            go.Bar(
                x=dates,
                y=ch.actual_spends * ch.conversion_rate,
                name=channel_name_formating(ch_name),
                hovertemplate="Date:%{x}<br>Spend:%{y:$.2s}",
                legendgroup=ch_name,
                marker_color=ch_color,
            )
        )
        sales_traces.append(
            go.Bar(
                x=dates,
                y=ch.actual_sales,
                name=channel_name_formating(ch_name),
                hovertemplate="Date:%{x}<br>Revenue:%{y:$.2s}",
                legendgroup=ch_name,
                showlegend=False,
                marker_color=ch_color,
            )
        )

    # Add all spend traces before all revenue traces to keep the original
    # legend/trace order.
    for trace in spend_traces:
        weekly_contribution_fig.add_trace(trace, row=1, col=1)
    for trace in sales_traces:
        weekly_contribution_fig.add_trace(trace, row=1, col=2)

    weekly_contribution_fig.add_trace(
        go.Bar(
            x=dates,
            y=scenario.constant + scenario.correction,
            name="Non Media",
            hovertemplate="Date:%{x}<br>Revenue:%{y:$.2s}",
            marker_color=color_palette[-1],
        ),
        row=1,
        col=2,
    )

    weekly_contribution_fig.update_layout(
        barmode="stack", title="Channel contribution by week", xaxis_title="Date"
    )
    weekly_contribution_fig.update_xaxes(showgrid=False)
    weekly_contribution_fig.update_yaxes(showgrid=False)
    return weekly_contribution_fig
|
|
|
|
|
def create_channel_spends_sales_plot(channel):
    """Build a dual-axis weekly chart: revenue bars plus a spends line.

    Args:
        channel: A Channel object, or None to plot only the non-media
            contribution of the default scenario.
    """
    if channel is None:
        # No channel selected: show the non-media baseline from the default
        # scenario rebuilt out of session state.
        dates = st.session_state["raw_df"].sort_values(by="Date").Date
        scenario = class_from_dict(st.session_state["default_scenario_dict"])
        baseline = scenario.constant + scenario.correction

        channel_sales_spends_fig = make_subplots(specs=[[{"secondary_y": True}]])
        channel_sales_spends_fig.add_trace(
            go.Bar(
                x=dates,
                y=baseline,
                marker_color=color_palette[0],
                name="Revenue",
                hovertemplate="Date:%{x}<br>Revenue:%{y:$.2s}",
            ),
            secondary_y=False,
        )
    else:
        dates = channel.dates
        weekly_spends = channel.actual_spends * channel.conversion_rate

        channel_sales_spends_fig = make_subplots(specs=[[{"secondary_y": True}]])
        channel_sales_spends_fig.add_trace(
            go.Bar(
                x=dates,
                y=channel.actual_sales,
                marker_color=color_palette[3],
                name="Revenue",
                hovertemplate="Date:%{x}<br>Revenue:%{y:$.2s}",
            ),
            secondary_y=False,
        )
        # Spends on the secondary axis as a line.
        channel_sales_spends_fig.add_trace(
            go.Scatter(
                x=dates,
                y=weekly_spends,
                line=dict(color=color_palette[2]),
                name="Spends",
                hovertemplate="Date:%{x}<br>Spend:%{y:$.2s}",
            ),
            secondary_y=True,
        )

    channel_sales_spends_fig.update_layout(
        xaxis_title="Date",
        yaxis_title="Revenue",
        yaxis2_title="Spends ($)",
        title="Channel spends and Revenue week-wise",
    )
    channel_sales_spends_fig.update_xaxes(showgrid=False)
    channel_sales_spends_fig.update_yaxes(showgrid=False)

    return channel_sales_spends_fig
|
|
|
|
|
def format_numbers(value, n_decimals=1, include_indicator=True):
    """Humanize a number (e.g. 1200 -> "1.2K"), optionally currency-prefixed.

    Args:
        value: Numeric value to format.
        n_decimals: Decimal places passed through to numerize.
        include_indicator: Prepend CURRENCY_INDICATOR and a space when True.
    """
    humanized = numerize(value, n_decimals)
    return f"{CURRENCY_INDICATOR} {humanized}" if include_indicator else humanized
|
|
|
|
|
def decimal_formater(num_string, n_decimals=1):
    """Pad a numeric string so it shows at least ``n_decimals`` decimals.

    "5" -> "5.0"; "5.2" (n_decimals=3) -> "5.200"; strings already having
    enough decimals are returned unchanged.
    """
    if "." not in num_string:
        return num_string + "." + "0" * n_decimals
    existing_decimals = num_string.split(".")[-1]
    missing = n_decimals - len(existing_decimals)
    return num_string + "0" * missing if missing > 0 else num_string
|
|
|
|
|
def channel_name_formating(channel_name):
    """Human-friendly channel label: underscores to spaces, Imp/Clicks -> Spend.

    NOTE: the suffix check is case-insensitive but the replacement targets the
    capitalized tokens "Imp"/"Clicks" only — a lowercase "_imp" suffix passes
    the check yet is left unchanged (original behavior preserved).
    """
    pretty = channel_name.replace("_", " ")
    lowered = pretty.lower()
    if lowered.endswith(" imp"):
        return pretty.replace("Imp", "Spend")
    if lowered.endswith(" clicks"):
        return pretty.replace("Clicks", "Spend")
    return pretty
|
|
|
|
|
def send_email(email, message):
    """Send ``message`` to ``email`` via Gmail SMTP (STARTTLS on port 587).

    The try/finally guarantees the connection is closed even when starttls,
    login, or sendmail raises — the original leaked the socket on failure.

    SECURITY(review): credentials are hard-coded in source; move them to
    environment variables or a secrets store and rotate this password.
    """
    server = smtplib.SMTP("smtp.gmail.com", 587)
    try:
        server.starttls()
        server.login("geethu4444@gmail.com", "jgydhpfusuremcol")
        server.sendmail("geethu4444@gmail.com", email, message)
    finally:
        server.quit()
|
|
|
|
|
# Allow running this module directly to exercise the data-initialization
# pipeline with its default Excel workbook.
if __name__ == "__main__":
    initialize_data()
|
|
|