import streamlit as st
import numpy as np
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from datetime import datetime, timedelta
from sklearn.linear_model import LinearRegression
import warnings
warnings.filterwarnings('ignore')
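# To launch the dashboard: streamlit run <this_file>.py  (substitute whatever filename this script is saved under)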

st.set_page_config(
    page_title="Profitability Intelligence Suite",
    page_icon="📊",
    layout="wide",
    initial_sidebar_state="collapsed"
)

# Custom CSS
st.markdown("""
<style>
    .main-header {
        font-size: 2.8rem;
        font-weight: 700;
        color: #1f77b4;
        text-align: center;
        margin-bottom: 0.5rem;
    }
    .sub-header {
        font-size: 1.2rem;
        color: #666;
        text-align: center;
        margin-bottom: 2rem;
    }
    .insight-box {
        background: #f8f9fa;
        border-left: 5px solid #1f77b4;
        padding: 1.5rem;
        margin: 1rem 0;
        border-radius: 8px;
        box-shadow: 0 4px 8px rgba(0,0,0,0.05);
    }
    .warning-box {
        background: #fff3cd;
        border-left: 5px solid #ffc107;
        padding: 1.5rem;
        margin: 1rem 0;
        border-radius: 8px;
    }
    .recommendation-card {
        background: white;
        border: 2px solid #e9ecef;
        border-radius: 12px;
        padding: 1.5rem;
        margin: 1rem 0;
        box-shadow: 0 4px 12px rgba(0,0,0,0.08);
    }
    .positive-impact {
        color: #28a745;
        font-weight: 700;
        font-size: 1.5rem;
    }
    .negative-impact {
        color: #dc3545;
        font-weight: 700;
        font-size: 1.5rem;
    }
</style>
""", unsafe_allow_html=True)

@st.cache_data(show_spinner=False)
def generate_synthetic_data(days=60, seed=42, rows_per_day=600):
    """Generate data with REALISTIC variance patterns"""
    rng = np.random.default_rng(seed)
    start_date = datetime.today().date() - timedelta(days=days)
    dates = pd.date_range(start_date, periods=days, freq="D")
    products = ["Premium Widget", "Standard Widget", "Economy Widget", "Deluxe Widget"]
    regions = ["Americas", "EMEA", "Asia Pacific"]
    channels = ["Direct Sales", "Distribution Partners", "E-Commerce"]

    base_price = {"Premium Widget": 120, "Standard Widget": 135, "Economy Widget": 110, "Deluxe Widget": 150}
    base_cost = {"Premium Widget": 70, "Standard Widget": 88, "Economy Widget": 60, "Deluxe Widget": 95}
    region_price_bump = {"Americas": 1.00, "EMEA": 1.03, "Asia Pacific": 0.97}
    region_cost_bump = {"Americas": 1.00, "EMEA": 1.02, "Asia Pacific": 1.01}
    channel_discount_mean = {"Direct Sales": 0.06, "Distribution Partners": 0.12, "E-Commerce": 0.04}
    channel_discount_std = {"Direct Sales": 0.02, "Distribution Partners": 0.03, "E-Commerce": 0.02}

    # Elasticity varies by segment
    seg_epsilon = {}
    for p in products:
        for r in regions:
            for c in channels:
                # More realistic elasticity range: -0.5 to -2.5
                base_eps = rng.uniform(-2.5, -0.5)
                if c == "Distribution Partners":
                    base_eps -= rng.uniform(0.3, 0.8)  # More price sensitive
                if c == "E-Commerce":
                    base_eps -= rng.uniform(0.2, 0.5)  # Also price sensitive
                seg_epsilon[(p, r, c)] = base_eps

    records = []
    for idx, d in enumerate(dates):
        dow = d.weekday()
        dow_mult = 1.0 + (0.08 if dow in (5, 6) else 0)

        # Add realistic seasonality and random shocks
        seasonal = 1.0 + 0.05*np.sin((d.toordinal()%365)/365*2*np.pi)

        # Random market shocks (some days have big changes)
        if rng.random() < 0.15:  # 15% of days have shocks
            market_shock = rng.uniform(0.85, 1.15)
        else:
            market_shock = 1.0

        # Gradual cost trends
        cost_trend = 1.0 + (idx / len(dates)) * 0.03  # 3% cost increase over period

        n = int(rows_per_day * market_shock * seasonal)
        prod = rng.choice(products, size=n, p=[0.35, 0.3, 0.2, 0.15])
        reg = rng.choice(regions, size=n, p=[0.4, 0.35, 0.25])
        ch = rng.choice(channels, size=n, p=[0.45, 0.35, 0.20])

        base_p = np.array([base_price[x] for x in prod]) * np.array([region_price_bump[x] for x in reg])
        base_c = np.array([base_cost[x] for x in prod]) * np.array([region_cost_bump[x] for x in reg]) * cost_trend

        # More variance in discounts
        discount = np.clip(
            np.array([channel_discount_mean[x] for x in ch]) +
            rng.normal(0, [channel_discount_std[x] * 2 for x in ch]),  # Double the variance
            0, 0.45
        )

        list_price = rng.normal(base_p, 8)  # More price variance
        net_price = np.clip(list_price * (1 - discount), 20, None)
        unit_cost = np.clip(rng.normal(base_c, 6), 10, None)

        eps = np.array([seg_epsilon[(pp, rr, cc)] for pp, rr, cc in zip(prod, reg, ch)])
        ref_price = np.array([base_price[x] for x in prod])
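        # Demand model: expected quantity scales as exp(elasticity * relative price gap), so a
        # segment priced above its reference price sells fewer units and one priced below sells
        # more. For example, eps = -2 with a +5% price gap gives exp(-2 * 0.05) ~= 0.905, i.e.
        # roughly a 9.5% volume reduction.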
        qty_mu = np.exp(eps * (net_price - ref_price) / np.maximum(ref_price, 1e-6))
        qty = np.maximum(1, rng.poisson(8 * dow_mult * seasonal * market_shock * qty_mu))

        revenue = net_price * qty
        cogs = unit_cost * qty
        gm_val = revenue - cogs
        gm_pct = np.where(revenue > 0, gm_val / revenue, 0.0)

        for i in range(n):
            records.append({
                "date": d,
                "product": prod[i],
                "region": reg[i],
                "channel": ch[i],
                "list_price": float(list_price[i]),
                "discount_pct": float(discount[i]),
                "net_price": float(net_price[i]),
                "unit_cost": float(unit_cost[i]),
                "qty": int(qty[i]),
                "revenue": float(revenue[i]),
                "cogs": float(cogs[i]),
                "gm_value": float(gm_val[i]),
                "gm_pct": float(gm_pct[i]),
                "dow": dow
            })

    df = pd.DataFrame(records)
    return df

def analyze_margin_bridge(df, current_date, prior_date):
    """Professional Price-Volume-Mix (PVM) analysis"""
    current_data = df[df["date"] == current_date].copy()
    prior_data = df[df["date"] == prior_date].copy()

    current_total_revenue = current_data["revenue"].sum()
    current_total_cogs = current_data["cogs"].sum()
    current_total_gm = current_total_revenue - current_total_cogs
    current_gm_pct = current_total_gm / current_total_revenue if current_total_revenue > 0 else 0

    prior_total_revenue = prior_data["revenue"].sum()
    prior_total_cogs = prior_data["cogs"].sum()
    prior_total_gm = prior_total_revenue - prior_total_cogs
    prior_gm_pct = prior_total_gm / prior_total_revenue if prior_total_revenue > 0 else 0

    total_gm_variance = current_total_gm - prior_total_gm

    current_seg = current_data.groupby(["product", "region", "channel"]).agg({
        "revenue": "sum",
        "cogs": "sum",
        "qty": "sum",
        "net_price": "mean",
        "unit_cost": "mean"
    }).reset_index()
    current_seg["gm"] = current_seg["revenue"] - current_seg["cogs"]
    current_seg["gm_pct"] = current_seg["gm"] / current_seg["revenue"]

    prior_seg = prior_data.groupby(["product", "region", "channel"]).agg({
        "revenue": "sum",
        "cogs": "sum",
        "qty": "sum",
        "net_price": "mean",
        "unit_cost": "mean"
    }).reset_index()
    prior_seg["gm"] = prior_seg["revenue"] - prior_seg["cogs"]
    prior_seg["gm_pct"] = prior_seg["gm"] / prior_seg["revenue"]

    merged = pd.merge(
        current_seg, 
        prior_seg, 
        on=["product", "region", "channel"], 
        suffixes=("_curr", "_prior"),
        how="outer"
    ).fillna(0)

    # PVM Decomposition
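    # price_effect:  change in average realized price x current volume
    # volume_effect: change in volume x prior average price x prior margin rate
    # cost_effect:   change in unit cost x current volume, sign-flipped so cost increases reduce GM
    # mix_effect:    residual of the GM variance not explained by the three effects above,
    #                i.e. shifts between higher- and lower-margin segments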
    merged["price_effect"] = (merged["net_price_curr"] - merged["net_price_prior"]) * merged["qty_curr"]
    merged["volume_effect"] = (merged["qty_curr"] - merged["qty_prior"]) * merged["net_price_prior"] * merged["gm_pct_prior"]
    merged["cost_effect"] = -(merged["unit_cost_curr"] - merged["unit_cost_prior"]) * merged["qty_curr"]
    merged["gm_variance"] = merged["gm_curr"] - merged["gm_prior"]
    merged["mix_effect"] = merged["gm_variance"] - (merged["price_effect"] + merged["volume_effect"] + merged["cost_effect"])

    return merged, {
        "total_gm_variance": total_gm_variance,
        "price_effect_total": merged["price_effect"].sum(),
        "volume_effect_total": merged["volume_effect"].sum(),
        "cost_effect_total": merged["cost_effect"].sum(),
        "mix_effect_total": merged["mix_effect"].sum(),
        "current_gm": current_total_gm,
        "prior_gm": prior_total_gm,
        "current_gm_pct": current_gm_pct,
        "prior_gm_pct": prior_gm_pct
    }

def estimate_segment_elasticity(df, product, region, channel):
    """Estimate price elasticity for a segment"""
    seg_df = df[(df["product"]==product)&(df["region"]==region)&(df["channel"]==channel)]
    if len(seg_df) < 100 or seg_df["net_price"].std() < 1e-6 or seg_df["qty"].std() < 1e-6:
        return -1.2, False  # Default elasticity
    try:
        x = np.log(np.clip(seg_df["net_price"].values, 1e-6, None)).reshape(-1,1)
        y = np.log(np.clip(seg_df["qty"].values, 1e-6, None))
        lin = LinearRegression().fit(x, y)
        elasticity = float(lin.coef_[0])
        # Bound elasticity to realistic range
        elasticity = np.clip(elasticity, -5.0, -0.3)
        return elasticity, True
    except Exception:
        return -1.2, False

def find_optimal_discount(base_data, elasticity, search_range=(-10, 10)):
    """
    Find profit-maximizing discount using price elasticity of demand
    Can recommend INCREASING or DECREASING discount
    """
    current_discount = base_data["discount_pct"]
    current_list_price = base_data["list_price"]
    current_price = base_data["net_price"]
    current_cost = base_data["unit_cost"]
    current_qty = base_data["qty"]

    # Test discount changes from -10pp to +10pp
    discount_changes = np.linspace(search_range[0], search_range[1], 41)
    results = []

    for disc_change in discount_changes:
        new_discount = np.clip(current_discount + (disc_change/100), 0.0, 0.50)
        new_price = current_list_price * (1 - new_discount)

        # Apply elasticity
        if current_price > 0:
            price_ratio = new_price / current_price
            new_qty = current_qty * (price_ratio ** elasticity)
        else:
            new_qty = current_qty

        new_revenue = new_price * new_qty
        new_cogs = current_cost * new_qty
        new_gm = new_revenue - new_cogs

        results.append({
            "discount_change": disc_change,
            "new_discount": new_discount * 100,
            "new_price": new_price,
            "new_qty": new_qty,
            "new_gm": new_gm,
            "new_revenue": new_revenue
        })

    results_df = pd.DataFrame(results)
    optimal_idx = results_df["new_gm"].idxmax()
    optimal = results_df.iloc[optimal_idx]

    current_gm = current_price * current_qty - current_cost * current_qty

    return {
        "current_discount": current_discount * 100,
        "optimal_discount": optimal["new_discount"],
        "discount_change": optimal["discount_change"],
        "current_price": current_price,
        "optimal_price": optimal["new_price"],
        "current_qty": current_qty,
        "optimal_qty": optimal["new_qty"],
        "current_gm": current_gm,
        "optimal_gm": optimal["new_gm"],
        "gm_uplift": optimal["new_gm"] - current_gm,
        "elasticity": elasticity,
        "all_scenarios": results_df
    }

# Main App
st.markdown('<h1 class="main-header">🎯 Daily Profitability Variance Analysis</h1>', unsafe_allow_html=True)
st.markdown('<p class="sub-header">Understanding What Drives Daily Margin Changes</p>', unsafe_allow_html=True)

# Generate data with realistic variance
with st.spinner("🔄 Loading business data..."):
    df = generate_synthetic_data(days=60, seed=42, rows_per_day=600)

# Calculate daily aggregates
daily = df.groupby("date").agg(
    revenue=("revenue","sum"),
    cogs=("cogs","sum"),
    gm_value=("gm_value","sum"),
    qty=("qty","sum")
).reset_index()
daily["gm_pct"] = np.where(daily["revenue"]>0, daily["gm_value"]/daily["revenue"], 0.0)

current_date = daily["date"].max()
prior_date = current_date - timedelta(days=1)
current_row = daily[daily["date"]==current_date].iloc[0]
prior_row = daily[daily["date"]==prior_date].iloc[0]
week_ago_row = daily.iloc[-8] if len(daily) > 7 else current_row
roll7 = daily["gm_pct"].tail(7).mean()

gm_variance_pp = (current_row["gm_pct"] - prior_row["gm_pct"]) * 100
gm_variance_dollar = current_row["gm_value"] - prior_row["gm_value"]

# Executive Dashboard
st.markdown("### πŸ“Š Executive Summary")

col1, col2, col3, col4 = st.columns(4)

with col1:
    st.metric(
        label="Gross Margin %",
        value=f"{current_row['gm_pct']*100:.2f}%",
        delta=f"{gm_variance_pp:+.2f}pp",
        delta_color="normal"
    )

with col2:
    st.metric(
        label="Gross Margin $",
        value=f"${current_row['gm_value']/1e6:.2f}M",
        delta=f"${gm_variance_dollar/1e6:+.2f}M",
        delta_color="normal"
    )

with col3:
    revenue_var_pct = ((current_row["revenue"] - prior_row["revenue"]) / prior_row["revenue"] * 100) if prior_row["revenue"] > 0 else 0
    st.metric(
        label="Revenue",
        value=f"${current_row['revenue']/1e6:.2f}M",
        delta=f"{revenue_var_pct:+.1f}%",
        delta_color="normal"
    )

with col4:
    volume_var_pct = ((current_row["qty"] - prior_row["qty"]) / prior_row["qty"] * 100) if prior_row["qty"] > 0 else 0
    st.metric(
        label="Volume (Units)",
        value=f"{current_row['qty']:,.0f}",
        delta=f"{volume_var_pct:+.1f}%",
        delta_color="normal"
    )

# Trend chart with REAL variance
st.markdown("#### πŸ“ˆ Gross Margin Trend (Last 30 Days)")
recent_daily = daily.tail(30)

fig_trend = go.Figure()
fig_trend.add_trace(go.Scatter(
    x=recent_daily["date"],
    y=recent_daily["gm_pct"]*100,
    mode='lines+markers',
    name="GM%",
    line=dict(color="#1f77b4", width=3),
    fill='tozeroy',
    fillcolor="rgba(31, 119, 180, 0.1)"
))
fig_trend.add_hline(y=roll7*100, line_dash="dash", line_color="red", 
                   annotation_text=f"7-Day Avg: {roll7*100:.2f}%", annotation_position="right")
fig_trend.update_layout(
    xaxis_title="Date",
    yaxis_title="Gross Margin %",
    height=350,
    hovermode="x unified"
)
st.plotly_chart(fig_trend, use_container_width=True)

st.markdown("---")

# Perform margin bridge analysis
with st.spinner("🔬 Performing Price-Volume-Mix analysis..."):
    variance_detail, summary = analyze_margin_bridge(df, current_date, prior_date)

# Main Tabs
tab1, tab2, tab3 = st.tabs(["📊 Margin Bridge (PVM)", "🔍 Segment Deep Dive", "💡 Optimal Pricing"])

with tab1:
    st.markdown(f"### Gross Margin Bridge: {prior_date.strftime('%b %d')} β†’ {current_date.strftime('%b %d')}")

    st.markdown(f"""
    <div class="insight-box">
    <b>📋 Variance Summary:</b><br>
    Gross margin changed by <b>${gm_variance_dollar/1000:+.1f}K</b> ({gm_variance_pp:+.2f} percentage points)<br>
    from {prior_row['gm_pct']*100:.2f}% to {current_row['gm_pct']*100:.2f}%
    </div>
    """, unsafe_allow_html=True)

    # Waterfall Chart
    st.markdown("#### Price-Volume-Mix (PVM) Waterfall Analysis")

    waterfall_data = pd.DataFrame({
        "Category": [
            f"{prior_date.strftime('%b %d')}<br>Gross Margin",
            "Price<br>Effect",
            "Volume<br>Effect",
            "Cost<br>Effect",
            "Mix<br>Effect",
            f"{current_date.strftime('%b %d')}<br>Gross Margin"
        ],
        "Value": [
            summary["prior_gm"],
            summary["price_effect_total"],
            summary["volume_effect_total"],
            summary["cost_effect_total"],
            summary["mix_effect_total"],
            summary["current_gm"]
        ],
        "Type": ["absolute", "relative", "relative", "relative", "relative", "total"]
    })
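    # Plotly waterfall semantics: "absolute" anchors the opening bar, "relative" bars add their
    # signed deltas to the running total, and "total" draws the running total as the closing bar.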

    fig_waterfall = go.Figure(go.Waterfall(
        orientation="v",
        measure=waterfall_data["Type"],
        x=waterfall_data["Category"],
        y=waterfall_data["Value"],
        text=[f"${v/1000:.1f}K" if abs(v) > 100 else f"${v:.0f}" for v in waterfall_data["Value"]],
        textposition="outside",
        connector={"line": {"color": "rgb(63, 63, 63)"}},
        increasing={"marker": {"color": "#28a745"}},
        decreasing={"marker": {"color": "#dc3545"}},
        totals={"marker": {"color": "#1f77b4"}}
    ))

    fig_waterfall.update_layout(
        title="Gross Margin Variance Breakdown",
        showlegend=False,
        height=450,
        yaxis_title="Gross Margin ($)"
    )
    st.plotly_chart(fig_waterfall, use_container_width=True)

    # Explanations
    col_exp1, col_exp2 = st.columns(2)

    with col_exp1:
        st.markdown(f"""
        <div class="insight-box">
        <b>💰 Price Effect:</b> ${summary['price_effect_total']/1000:+.1f}K<br>
        <small>Impact of changes in realized selling prices</small>
        </div>
        """, unsafe_allow_html=True)

        st.markdown(f"""
        <div class="insight-box">
        <b>📦 Volume Effect:</b> ${summary['volume_effect_total']/1000:+.1f}K<br>
        <small>Impact of selling more/fewer units</small>
        </div>
        """, unsafe_allow_html=True)

    with col_exp2:
        st.markdown(f"""
        <div class="insight-box">
        <b>🏭 Cost Effect:</b> ${summary['cost_effect_total']/1000:+.1f}K<br>
        <small>Impact of changes in unit costs</small>
        </div>
        """, unsafe_allow_html=True)

        st.markdown(f"""
        <div class="insight-box">
        <b>🔀 Mix Effect:</b> ${summary['mix_effect_total']/1000:+.1f}K<br>
        <small>Impact of product/channel mix shifts</small>
        </div>
        """, unsafe_allow_html=True)

with tab2:
    st.markdown("### Segment-Level Variance Analysis")

    variance_detail_sorted = variance_detail.sort_values("gm_variance", ascending=False)

    col_seg1, col_seg2 = st.columns(2)

    with col_seg1:
        st.markdown("#### πŸ“ˆ Top 5 Margin Gainers")
        for _, row in variance_detail_sorted.head(5).iterrows():
            if row["gm_variance"] > 0:
                st.markdown(f"""
                <div class="recommendation-card" style="border-left: 4px solid #28a745;">
                <b>{row['product']}</b><br>
                <small>{row['region']} • {row['channel']}</small><br>
                <span class="positive-impact">+${row['gm_variance']:.2f}</span><br>
                <small>Price: ${row['price_effect']:+.2f} | Volume: ${row['volume_effect']:+.2f} | Cost: ${row['cost_effect']:+.2f}</small>
                </div>
                """, unsafe_allow_html=True)

    with col_seg2:
        st.markdown("#### πŸ“‰ Top 5 Margin Losers")
        for _, row in variance_detail_sorted.tail(5).iterrows():
            if row["gm_variance"] < 0:
                st.markdown(f"""
                <div class="recommendation-card" style="border-left: 4px solid #dc3545;">
                <b>{row['product']}</b><br>
                <small>{row['region']} • {row['channel']}</small><br>
                <span class="negative-impact">${row['gm_variance']:.2f}</span><br>
                <small>Price: ${row['price_effect']:+.2f} | Volume: ${row['volume_effect']:+.2f} | Cost: ${row['cost_effect']:+.2f}</small>
                </div>
                """, unsafe_allow_html=True)

with tab3:
    st.markdown("### Optimal Pricing Analysis")
    st.markdown("""
    <div class="insight-box">
    <b>🎯 Profit Maximization:</b> Using price elasticity of demand to find the optimal discount level.
    <br>May recommend <b>increasing</b> or <b>decreasing</b> discount depending on elasticity.
    </div>
    """, unsafe_allow_html=True)

    # Get segments with meaningful volume
    recent_segments = df[df["date"] >= (current_date - timedelta(days=7))].groupby(["product", "region", "channel"]).agg({
        "qty": "sum",
        "gm_value": "sum"
    }).reset_index()
    recent_segments = recent_segments[recent_segments["qty"] > 100]  # Minimum volume threshold
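    # Screening pipeline: keep segments with enough recent volume, estimate each segment's
    # elasticity from its own history, search for the GM-maximizing discount, and surface only
    # segments whose expected daily uplift is material (> $5/day).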

    optimization_results = []

    for _, seg in recent_segments.iterrows():
        p, r, c = seg["product"], seg["region"], seg["channel"]
        hist = df[(df["product"]==p)&(df["region"]==r)&(df["channel"]==c)].sort_values("date")

        if hist.empty or len(hist) < 100:
            continue

        elasticity, is_valid = estimate_segment_elasticity(hist, p, r, c)

        if not is_valid:
            continue

        current_state = hist.iloc[-1]
        optimal_result = find_optimal_discount(current_state, elasticity)

        if abs(optimal_result["gm_uplift"]) > 5:  # Only show meaningful opportunities
            optimization_results.append({
                "Product": p,
                "Region": r,
                "Channel": c,
                "Current Discount": optimal_result["current_discount"],
                "Optimal Discount": optimal_result["optimal_discount"],
                "Discount Change": optimal_result["discount_change"],
                "Price Elasticity": elasticity,
                "Current GM/Day": optimal_result["current_gm"],
                "Optimal GM/Day": optimal_result["optimal_gm"],
                "Daily GM Uplift": optimal_result["gm_uplift"],
                "Direction": "Increase Discount" if optimal_result["discount_change"] > 0 else "Decrease Discount",
                "all_scenarios": optimal_result["all_scenarios"]
            })

    # Build the results frame first; sorting an empty frame on a missing column would raise.
    opt_df = pd.DataFrame(optimization_results)
    if not opt_df.empty:
        opt_df = opt_df.sort_values("Daily GM Uplift", ascending=False)

    if len(opt_df) > 0:
        st.markdown("#### πŸ† Top 5 Optimization Opportunities")

        for i, (_, rec) in enumerate(opt_df.head(5).iterrows()):
            direction_color = "#ff7f0e" if rec["Direction"] == "Increase Discount" else "#1f77b4"

            st.markdown(f"""
            <div class="recommendation-card" style="border-left: 5px solid {direction_color};">
                <h4>#{i+1}: {rec['Product']} • {rec['Region']} • {rec['Channel']}</h4>
                <p><b>Elasticity:</b> {rec['Price Elasticity']:.2f} ({"Elastic" if rec['Price Elasticity'] < -1 else "Inelastic"})</p>
                <p><b>Recommendation:</b> {rec['Direction']} by {abs(rec['Discount Change']):.1f}pp</p>
                <p><small>Current: {rec['Current Discount']:.1f}% → Optimal: {rec['Optimal Discount']:.1f}%</small></p>
                <p class="positive-impact">💰 Expected Uplift: ${rec['Daily GM Uplift']:.2f}/day</p>
                <p><small>Annual Impact: ${rec['Daily GM Uplift']*365/1000:.1f}K</small></p>
            </div>
            """, unsafe_allow_html=True)

            # Show elasticity curve
            with st.expander(f"πŸ“Š View Profit Curve for {rec['Product']} β€’ {rec['Region']} β€’ {rec['Channel']}"):
                scenario_df = rec["all_scenarios"]

                fig_curve = go.Figure()
                fig_curve.add_trace(go.Scatter(
                    x=scenario_df["new_discount"],
                    y=scenario_df["new_gm"],
                    mode='lines',
                    name='Gross Margin',
                    line=dict(color='#1f77b4', width=3)
                ))

                fig_curve.add_vline(x=rec["Current Discount"], line_dash="dash", line_color="red",
                                   annotation_text=f"Current: {rec['Current Discount']:.1f}%")
                fig_curve.add_vline(x=rec["Optimal Discount"], line_dash="dash", line_color="green",
                                   annotation_text=f"Optimal: {rec['Optimal Discount']:.1f}%")

                fig_curve.update_layout(
                    title=f"Profit Maximization Curve (Elasticity: {rec['Price Elasticity']:.2f})",
                    xaxis_title="Discount Level (%)",
                    yaxis_title="Expected Gross Margin ($)",
                    height=400
                )
                st.plotly_chart(fig_curve, use_container_width=True)

        st.markdown("---")
        st.markdown("#### Complete Optimization List")

        display_opt = opt_df[[
            "Product", "Region", "Channel", "Current Discount", "Optimal Discount",
            "Discount Change", "Price Elasticity", "Daily GM Uplift", "Direction"
        ]].copy()

        st.dataframe(display_opt.style.format({
            "Current Discount": "{:.1f}%",
            "Optimal Discount": "{:.1f}%",
            "Discount Change": "{:+.1f}pp",
            "Price Elasticity": "{:.2f}",
            "Daily GM Uplift": "${:,.2f}"
        }).background_gradient(subset=["Daily GM Uplift"], cmap="Greens"),
        use_container_width=True, height=400)

        st.download_button(
            label="πŸ“₯ Download Optimization Plan (CSV)",
            data=opt_df.drop(columns=["all_scenarios"]).to_csv(index=False).encode("utf-8"),
            file_name=f"optimal_pricing_plan_{current_date.strftime('%Y%m%d')}.csv",
            mime="text/csv"
        )
    else:
        st.info("All segments are currently near optimal pricing levels.")

st.markdown("---")
st.markdown("""
<div style="text-align: center; color: #666; padding: 1rem;">
    <small>🔒 Demo Mode: Using synthetic transaction data with realistic variance patterns</small>
</div>
""", unsafe_allow_html=True)