diff --git a/marimo/superchain-chainlink-case.py b/marimo/superchain-chainlink-case.py
new file mode 100644
index 00000000..a414b781
--- /dev/null
+++ b/marimo/superchain-chainlink-case.py
@@ -0,0 +1,633 @@
+import marimo
+
+__generated_with = "0.15.3"
+app = marimo.App(width="full")
+
+
+@app.cell
+def setup_pyoso():
+ # This code sets up pyoso to be used as a database provider for this notebook
+ # This code is autogenerated. Modification could lead to unexpected results :)
+ import marimo as mo
+ from pyoso import Client
+ client = Client()
+ try:
+ pyoso_db_conn = client.dbapi_connection()
+ except Exception as e:
+ pyoso_db_conn = None
+ return client, mo
+
+
+@app.cell
+def about_app(mo):
+ _author = 'OSO Team'
+ _updated_at = '2025-09-19'
+    mo.vstack([
+        mo.md(f"""
+        # Chainlink Impact Analysis on Superchain
+        Author: {_author} · Last Updated: {_updated_at}
+        """),
+        mo.md("""
+        This dashboard analyzes Chainlink's impact on the Superchain ecosystem across four dimensions: direct contract interactions, downstream transaction fee generation, implied risk reduction, and pre-post revenue analysis. Together, these views quantify how Chainlink's oracle infrastructure contributes to the success and security of the Superchain.
+        """),
+ mo.accordion({
+ "Click to see details on how app was made": mo.accordion({
+ "Methodology": """
+ - **Direct Contract Interactions**: Calculate cumulative transaction fees from direct interactions with Chainlink price feed contracts
+ - **Downstream Impact**: Attribute a percentage of transaction fees from apps using Chainlink oracles to Chainlink's contribution
+ - **Implied Risk Reduction**: Use historical oracle manipulation data to estimate avoided losses through Chainlink integration
+ - **Pre-Post Revenue Analysis**: Compare chain revenues before and after Chainlink integration with synthetic control methodology
+ """,
+ "Data Sources": """
+ - [OSS Directory](https://github.com/opensource-observer/oss-directory)
+ - [Pyoso API](https://docs.opensource.observer/docs/get-started/python)
+ - Chainlink contract interaction data
+ - Historical oracle exploit data
+ - Superchain transaction and revenue metrics
+ """,
+ "Further Resources": """
+ - [Getting Started with Pyoso](https://docs.opensource.observer/docs/get-started/python)
+ - [Using the Semantic Layer](https://docs.opensource.observer/docs/get-started/using-semantic-layer)
+ - [Marimo Documentation](https://docs.marimo.io/)
+ - [Chainlink Documentation](https://docs.chain.link/)
+ """
+ })
+ })
+ ])
+ return
+
+
+@app.cell
+def import_libraries():
+    import pandas as pd
+    import plotly.express as px
+    from datetime import timedelta
+    return pd, px, timedelta
+
+
+@app.cell
+def configuration_settings(mo):
+ # Chain selection
+ CHAINS = sorted([
+ 'OPTIMISM', 'ZORA', 'BASE', 'MODE', 'METAL',
+ 'BOB', 'LISK', 'MINT', 'POLYNOMIAL', 'WORLDCHAIN',
+ 'RACE', 'SHAPE', 'UNICHAIN', 'SWELL', 'SONEIUM',
+ 'INK', 'ARENAZ'
+ ])
+
+ chain_filter_input = mo.ui.multiselect(
+ options=CHAINS,
+ value=CHAINS,
+ label="Filter by Chain",
+ full_width=True
+ )
+
+ # Time range selection
+ time_range_input = mo.ui.dropdown(
+ options=["All Time", "YTD", "Last 6 Months", "Last 12 Months"],
+ value="YTD",
+ label="Time Range",
+ full_width=True
+ )
+
+ # Attribution percentage for downstream impact
+ attribution_pct_input = mo.ui.slider(
+ start=0.0,
+ stop=50.0,
+ step=1.0,
+ value=15.0,
+ label="Downstream Attribution %",
+ full_width=True
+ )
+
+ # Risk reduction assumptions
+ risk_reduction_input = mo.ui.slider(
+ start=0.0,
+ stop=100.0,
+ step=5.0,
+ value=80.0,
+ label="Estimated Risk Reduction %",
+ full_width=True
+ )
+
+ mo.vstack([
+ mo.md("### Configuration"),
+ chain_filter_input,
+ time_range_input,
+ mo.md("### Analysis Parameters"),
+ attribution_pct_input,
+ risk_reduction_input
+ ])
+ return (
+ attribution_pct_input,
+ chain_filter_input,
+ risk_reduction_input,
+ time_range_input,
+ )
+
+
+@app.cell
+def get_chainlink_contracts(client):
+    # Top Chainlink contracts by all-time invocations, with the chain they live on
+    _query_contracts = """
+    SELECT
+      artifact_name AS contract_address,
+      artifact_source AS chain,
+      amount AS contract_invocations
+    FROM artifacts_by_project_v1
+    JOIN key_metrics_by_artifact_v0 USING (artifact_id)
+    JOIN metrics_v0 USING (metric_id)
+    WHERE
+      project_name = 'chainlink'
+      AND project_source = 'OSS_DIRECTORY'
+      AND metric_name LIKE '%_contract_invocations_over_all_time'
+    ORDER BY 3 DESC
+    LIMIT 50
+    """
+    df_contracts = client.to_pandas(_query_contracts)
+    return (df_contracts,)
+
+
+@app.cell
+def _(df_contracts):
+ df_contracts
+ return
+
+
+@app.cell
+def _(client, pd):
+ _query_usage = """
+ SELECT
+ sample_date,
+ regexp_extract(metric_name, '^(.*)_contract_invocations_daily', 1) AS chain,
+ amount
+ FROM timeseries_metrics_by_project_v0
+ JOIN projects_v1 USING project_id
+ JOIN metrics_v0 USING metric_id
+ WHERE
+ project_name = 'chainlink'
+ AND project_source = 'OSS_DIRECTORY'
+ AND metric_name LIKE '%_contract_invocations_daily'
+ ORDER BY 1
+ """
+ df_usage = client.to_pandas(_query_usage)
+ df_usage['sample_date'] = pd.to_date(df_usage['sample_date'])
+ return (df_usage,)
+
+
+@app.cell
+def _(client, pd):
+ _query_revenue = """
+ SELECT
+ sample_date,
+ regexp_extract(metric_name, '^(.*)_layer2_gas_fees_amortized_daily', 1) AS chain,
+ amount
+ FROM timeseries_metrics_by_project_v0
+ JOIN projects_v1 USING project_id
+ JOIN metrics_v0 USING metric_id
+ WHERE
+ project_name = 'chainlink'
+ AND project_source = 'OSS_DIRECTORY'
+ AND metric_name LIKE '%_layer2_gas_fees_amortized_daily'
+ ORDER BY 1
+ """
+ df_revenue = client.to_pandas(_query_revenue)
+ df_revenue['sample_date'] = pd.to_date(df_revenue['sample_date'])
+ return (df_revenue,)
+
+
+@app.cell
+def get_historical_exploit_data(pd):
+ # Historical oracle exploit data (dummy data for demonstration)
+ exploit_data = [
+ {'date': '2022-03-01', 'chain': 'ETHEREUM', 'protocol': 'Protocol A', 'loss_amount_eth': 1000, 'oracle_type': 'Custom'},
+ {'date': '2022-06-15', 'chain': 'BSC', 'protocol': 'Protocol B', 'loss_amount_eth': 500, 'oracle_type': 'Custom'},
+ {'date': '2022-09-20', 'chain': 'POLYGON', 'protocol': 'Protocol C', 'loss_amount_eth': 2000, 'oracle_type': 'Custom'},
+ {'date': '2023-01-10', 'chain': 'AVALANCHE', 'protocol': 'Protocol D', 'loss_amount_eth': 800, 'oracle_type': 'Custom'},
+ {'date': '2023-04-05', 'chain': 'ARBITRUM', 'protocol': 'Protocol E', 'loss_amount_eth': 1500, 'oracle_type': 'Custom'},
+ ]
+
+ df_exploits = pd.DataFrame(exploit_data)
+ df_exploits['date'] = pd.to_datetime(df_exploits['date'])
+ return (df_exploits,)
+
+
+@app.cell
+def filter_data(
+ chain_filter_input,
+ df_contracts,
+ df_revenue,
+ df_usage,
+ pd,
+ time_range_input,
+ timedelta,
+):
+ # Filter by selected chains
+ df_contracts_filtered = df_contracts[df_contracts['chain'].isin(chain_filter_input.value)].copy()
+ df_usage_filtered = df_usage[df_usage['chain'].isin(chain_filter_input.value)].copy()
+ df_revenue_filtered = df_revenue[df_revenue['chain'].isin(chain_filter_input.value)].copy()
+
+ # Filter by time range
+ if time_range_input.value == "YTD":
+ cutoff_date = pd.Timestamp('2024-01-01')
+ elif time_range_input.value == "Last 6 Months":
+ cutoff_date = pd.Timestamp.now() - timedelta(days=180)
+ elif time_range_input.value == "Last 12 Months":
+ cutoff_date = pd.Timestamp.now() - timedelta(days=365)
+ else:
+ cutoff_date = None
+
+ if cutoff_date:
+ df_usage_filtered = df_usage_filtered[df_usage_filtered['sample_date'] >= cutoff_date]
+ df_revenue_filtered = df_revenue_filtered[df_revenue_filtered['sample_date'] >= cutoff_date]
+
+ return df_contracts_filtered, df_revenue_filtered, df_usage_filtered
+
+
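+@app.cell
+def get_downstream_app_data(chain_filter_input, pd):
+    # Placeholder dataset of apps that consume Chainlink oracles, consumed by the
+    # downstream-impact and summary sections below. These are hypothetical dummy
+    # values for demonstration only (mirroring the dummy exploit data); swap in a
+    # real query of Chainlink-dependent Superchain apps when that data is wired up.
+    _app_fee_data = [
+        {'chain': 'OPTIMISM', 'transaction_fees_eth': 1200.0},
+        {'chain': 'BASE', 'transaction_fees_eth': 2500.0},
+        {'chain': 'ZORA', 'transaction_fees_eth': 150.0},
+        {'chain': 'MODE', 'transaction_fees_eth': 90.0},
+    ]
+    df_apps_filtered = pd.DataFrame(_app_fee_data)
+    df_apps_filtered = df_apps_filtered[df_apps_filtered['chain'].isin(chain_filter_input.value)]
+    return (df_apps_filtered,)
+
+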
+@app.cell
+def generate_direct_impact_analysis(
+ df_revenue_filtered,
+ df_usage_filtered,
+ mo,
+ px,
+):
+
+ # Calculate total direct fees
+ total_direct_fees = df_revenue_filtered['amount'].sum()
+ total_interactions = df_usage_filtered['amount'].sum()
+
+ # Create summary stats
+ direct_fees_stat = mo.stat(
+ label="Total Direct Fees (ETH)",
+ bordered=True,
+ value=f"{total_direct_fees:.2f}",
+ )
+
+ interactions_stat = mo.stat(
+ label="Total Interactions",
+ bordered=True,
+        value=f"{total_interactions:,.0f}",
+ )
+
+ # Create time series plot
+ df_daily = df_revenue_filtered.groupby('sample_date')['amount'].sum().reset_index()
+
+ def make_direct_fees_plot(dataframe, title=""):
+ fig = px.line(
+ dataframe,
+ x="sample_date",
+ y="amount",
+ title=f"{title}"
+ )
+ fig.update_layout(
+ paper_bgcolor="white",
+ plot_bgcolor="white",
+ font=dict(size=12, color="#111"),
+ title=dict(text=title, x=0, xanchor="left"),
+ margin=dict(t=50, l=20, r=20, b=20),
+ legend_title="",
+ hovermode="x"
+ )
+ fig.update_xaxes(showgrid=False, linecolor="#000", ticks="outside")
+ fig.update_yaxes(showgrid=True, gridcolor="#DDD", linecolor="#000", ticks="outside")
+ return fig
+
+ _fig = make_direct_fees_plot(df_daily, "Daily Direct Contract Interaction Fees")
+
+ mo.vstack([
+ mo.md("## 1. Direct Contract Interactions"),
+ mo.hstack([direct_fees_stat, interactions_stat], widths="equal", gap=1),
+ mo.md("""
+ **Analysis**: This section shows the direct transaction fees generated from interactions with Chainlink price feed contracts.
+ These represent the most straightforward measure of Chainlink's direct economic impact on the Superchain ecosystem.
+ """),
+ mo.ui.plotly(_fig)
+ ])
+ return
+
+
+@app.cell
+def _(df_usage):
+ df_usage
+ return
+
+
+@app.cell
+def generate_downstream_impact_analysis(
+ attribution_pct_input,
+ df_apps_filtered,
+ mo,
+ px,
+):
+ mo.md("## 2. Downstream Impact on Transaction Fees")
+
+ # Calculate attributed fees
+ total_app_fees = df_apps_filtered['transaction_fees_eth'].sum()
+ attributed_fees = total_app_fees * (attribution_pct_input.value / 100)
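+    # e.g. with the default 15% slider value, 100 ETH of app fees attributes 15 ETH to Chainlink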
+
+ # Create summary stats
+ total_fees_stat = mo.stat(
+ label="Total App Fees (ETH)",
+ bordered=True,
+ value=f"{total_app_fees:.2f}",
+ )
+
+ attributed_stat = mo.stat(
+ label=f"Attributed to Chainlink ({attribution_pct_input.value}%)",
+ bordered=True,
+ value=f"{attributed_fees:.2f}",
+ )
+
+    _stats = mo.hstack([total_fees_stat, attributed_stat], widths="equal", gap=1)
+
+ # Create chain breakdown
+ df_chain_breakdown = df_apps_filtered.groupby('chain')['transaction_fees_eth'].sum().reset_index()
+ df_chain_breakdown['attributed_fees'] = df_chain_breakdown['transaction_fees_eth'] * (attribution_pct_input.value / 100)
+
+ def make_chain_breakdown_plot(dataframe, title=""):
+ fig = px.bar(
+ dataframe,
+ x="chain",
+ y=["transaction_fees_eth", "attributed_fees"],
+ title=f"{title}",
+ barmode="group"
+ )
+ fig.update_layout(
+ paper_bgcolor="white",
+ plot_bgcolor="white",
+ font=dict(size=12, color="#111"),
+ title=dict(text=title, x=0, xanchor="left"),
+ margin=dict(t=50, l=20, r=20, b=20),
+ legend_title="",
+ hovermode="x"
+ )
+ fig.update_xaxes(showgrid=False, linecolor="#000", ticks="outside")
+ fig.update_yaxes(showgrid=True, gridcolor="#DDD", linecolor="#000", ticks="outside")
+ return fig
+
+    _fig = make_chain_breakdown_plot(df_chain_breakdown, "Transaction Fees by Chain")
+
+    mo.vstack([
+        _header,
+        _stats,
+        mo.md(f"""
+        **Analysis**: This section attributes {attribution_pct_input.value}% of transaction fees from applications using Chainlink oracles to Chainlink's contribution.
+        This represents the second-order economic impact, assuming that Chainlink's reliable oracle infrastructure contributes to the success and legitimacy of these applications.
+        """),
+        mo.ui.plotly(_fig)
+    ])
+ return
+
+
+@app.cell
+def generate_risk_reduction_analysis(
+ df_exploits,
+ mo,
+ px,
+ risk_reduction_input,
+):
+ mo.md("## 3. Implied Risk Reduction")
+
+ # Calculate potential avoided losses
+ total_historical_losses = df_exploits['loss_amount_eth'].sum()
+ avg_loss_per_exploit = df_exploits['loss_amount_eth'].mean()
+
+ # Estimate avoided losses (simplified calculation)
+ estimated_avoided_losses = total_historical_losses * (risk_reduction_input.value / 100)
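+    # Worked example with the dummy data above: 5,800 ETH of historical losses at the default 80% assumption implies ~4,640 ETH avoided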
+
+ # Create summary stats
+ historical_losses_stat = mo.stat(
+ label="Historical Oracle Losses (ETH)",
+ bordered=True,
+ value=f"{total_historical_losses:,.0f}",
+ )
+
+ avoided_losses_stat = mo.stat(
+ label=f"Estimated Avoided Losses ({risk_reduction_input.value}%)",
+ bordered=True,
+ value=f"{estimated_avoided_losses:,.0f}",
+ )
+
+    _stats = mo.hstack([historical_losses_stat, avoided_losses_stat], widths="equal", gap=1)
+
+ # Create exploit timeline
+ def make_exploit_timeline(dataframe, title=""):
+ fig = px.bar(
+ dataframe,
+ x="date",
+ y="loss_amount_eth",
+ color="chain",
+ title=f"{title}"
+ )
+ fig.update_layout(
+ paper_bgcolor="white",
+ plot_bgcolor="white",
+ font=dict(size=12, color="#111"),
+ title=dict(text=title, x=0, xanchor="left"),
+ margin=dict(t=50, l=20, r=20, b=20),
+ legend_title="Chain",
+ hovermode="x"
+ )
+ fig.update_xaxes(showgrid=False, linecolor="#000", ticks="outside")
+ fig.update_yaxes(showgrid=True, gridcolor="#DDD", linecolor="#000", ticks="outside")
+ return fig
+
+    _fig = make_exploit_timeline(df_exploits, "Historical Oracle Exploits by Chain")
+
+    mo.vstack([
+        _header,
+        _stats,
+        mo.md(f"""
+        **Analysis**: This section estimates the value of risk reduction provided by Chainlink's secure oracle infrastructure.
+        Based on historical oracle manipulation exploits on other chains, we estimate that Chainlink integration has prevented
+        approximately {risk_reduction_input.value}% of potential losses that would have occurred with less secure oracle solutions.
+        """),
+        mo.ui.plotly(_fig)
+    ])
+ return
+
+
+@app.cell
+def generate_pre_post_analysis(df_revenue_filtered, mo, pd, px, timedelta):
+ mo.md("## 4. Pre-Post Revenue Analysis")
+
+ # Simulate Chainlink integration dates (dummy data)
+ integration_dates = {
+ 'OPTIMISM': '2023-06-01',
+ 'BASE': '2023-08-15',
+ 'ZORA': '2023-10-01',
+ 'MODE': '2024-01-01'
+ }
+
+ # Calculate pre/post metrics for each chain
+ analysis_results = []
+
+ for chain in df_revenue_filtered['chain'].unique():
+ if chain in integration_dates:
+ integration_date = pd.Timestamp(integration_dates[chain])
+ chain_data = df_revenue_filtered[df_revenue_filtered['chain'] == chain].copy()
+
+ # Pre-integration period (6 months before)
+ pre_start = integration_date - timedelta(days=180)
+ pre_data = chain_data[(chain_data['sample_date'] >= pre_start) & (chain_data['sample_date'] < integration_date)]
+
+ # Post-integration period (6 months after)
+ post_end = integration_date + timedelta(days=180)
+ post_data = chain_data[(chain_data['sample_date'] >= integration_date) & (chain_data['sample_date'] <= post_end)]
+
+ if len(pre_data) > 0 and len(post_data) > 0:
+                pre_avg = pre_data['amount'].mean()
+                post_avg = post_data['amount'].mean()
+ growth_pct = ((post_avg - pre_avg) / pre_avg) * 100
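+                # e.g. pre_avg = 10 ETH/day and post_avg = 12 ETH/day gives growth_pct = 20.0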
+
+ analysis_results.append({
+ 'chain': chain,
+ 'integration_date': integration_date,
+ 'pre_avg_revenue': pre_avg,
+ 'post_avg_revenue': post_avg,
+ 'growth_percentage': growth_pct
+ })
+
+ df_analysis = pd.DataFrame(analysis_results)
+
+ if len(df_analysis) > 0:
+ # Create summary stats
+ avg_growth_stat = mo.stat(
+ label="Average Revenue Growth %",
+ bordered=True,
+ value=f"{df_analysis['growth_percentage'].mean():.1f}%",
+ )
+
+ total_chains_stat = mo.stat(
+ label="Chains Analyzed",
+ bordered=True,
+ value=f"{len(df_analysis)}",
+ )
+
+        _elements.append(mo.hstack([avg_growth_stat, total_chains_stat], widths="equal", gap=1))
+
+ # Create growth comparison chart
+ def make_growth_plot(dataframe, title=""):
+ fig = px.bar(
+ dataframe,
+ x="chain",
+ y="growth_percentage",
+ title=f"{title}",
+ color="growth_percentage",
+ color_continuous_scale="RdYlGn"
+ )
+ fig.update_layout(
+ paper_bgcolor="white",
+ plot_bgcolor="white",
+ font=dict(size=12, color="#111"),
+ title=dict(text=title, x=0, xanchor="left"),
+ margin=dict(t=50, l=20, r=20, b=20),
+ legend_title="",
+ hovermode="x"
+ )
+ fig.update_xaxes(showgrid=False, linecolor="#000", ticks="outside")
+ fig.update_yaxes(showgrid=True, gridcolor="#DDD", linecolor="#000", ticks="outside")
+ return fig
+
+        _fig = make_growth_plot(df_analysis, "Revenue Growth After Chainlink Integration")
+        _elements.append(mo.ui.plotly(_fig))
+
+        # Show detailed table
+        _elements.append(mo.ui.table(
+            df_analysis.round(2),
+            selection=None,
+            show_column_summaries=False,
+            show_data_types=False,
+            page_size=10
+        ))
+
+ mo.md("""
+ **Analysis**: This section compares chain revenues before and after Chainlink integration to estimate the revenue lift
+ attributable to improved oracle infrastructure. The analysis uses a 6-month pre/post comparison window and shows the
+ percentage change in average daily revenue after integration.
+ """)
+ return
+
+
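+@app.cell
+def synthetic_control_sketch(df_revenue_filtered, pd):
+    # Minimal synthetic-control sketch for the pre/post analysis above. This is
+    # illustrative only and not wired into the dashboard: it assumes a hypothetical
+    # treated chain and the dummy integration date used earlier, fits ordinary
+    # least-squares weights over the remaining ("donor") chains on the
+    # pre-integration period, and projects a counterfactual revenue series.
+    import numpy as np
+
+    _treated_chain = 'BASE'
+    _integration_date = pd.Timestamp('2023-08-15')
+
+    _wide = (
+        df_revenue_filtered
+        .pivot_table(index='sample_date', columns='chain', values='amount', aggfunc='sum')
+        .fillna(0.0)
+    )
+    _pre = _wide[_wide.index < _integration_date]
+    if _treated_chain in _wide.columns and len(_wide.columns) > 1 and len(_pre) > 0:
+        _donors = [c for c in _wide.columns if c != _treated_chain]
+        # Least-squares donor weights, fit on the pre-period only
+        _weights, *_rest = np.linalg.lstsq(
+            _pre[_donors].values, _pre[_treated_chain].values, rcond=None
+        )
+        df_synthetic = pd.DataFrame({
+            'sample_date': _wide.index,
+            'actual_revenue': _wide[_treated_chain].values,
+            'synthetic_revenue': _wide[_donors].values @ _weights,
+        })
+    else:
+        df_synthetic = pd.DataFrame(columns=['sample_date', 'actual_revenue', 'synthetic_revenue'])
+    df_synthetic
+    return
+
+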
+@app.cell
+def generate_summary_dashboard(
+ attribution_pct_input,
+ df_apps_filtered,
+    df_revenue_filtered,
+ mo,
+ pd,
+ px,
+ risk_reduction_input,
+):
+ mo.md("## Summary Dashboard")
+
+ # Calculate total impact
+ direct_fees = df_contracts_filtered['transaction_fees_eth'].sum()
+ downstream_fees = df_apps_filtered['transaction_fees_eth'].sum() * (attribution_pct_input.value / 100)
+
+ # Estimate risk reduction value (simplified)
+ estimated_risk_value = 10000 * (risk_reduction_input.value / 100) # Placeholder calculation
+
+ total_impact = direct_fees + downstream_fees + estimated_risk_value
+
+ # Create impact breakdown
+ impact_data = [
+ {'Category': 'Direct Fees', 'Value': direct_fees, 'Percentage': (direct_fees / total_impact) * 100},
+ {'Category': 'Downstream Attribution', 'Value': downstream_fees, 'Percentage': (downstream_fees / total_impact) * 100},
+ {'Category': 'Risk Reduction', 'Value': estimated_risk_value, 'Percentage': (estimated_risk_value / total_impact) * 100}
+ ]
+
+ df_impact = pd.DataFrame(impact_data)
+
+ # Summary stats
+ total_impact_stat = mo.stat(
+ label="Total Estimated Impact (ETH)",
+ bordered=True,
+ value=f"{total_impact:.2f}",
+ )
+
+ direct_impact_stat = mo.stat(
+ label="Direct Impact (ETH)",
+ bordered=True,
+ value=f"{direct_fees:.2f}",
+ )
+
+ downstream_impact_stat = mo.stat(
+ label="Downstream Impact (ETH)",
+ bordered=True,
+ value=f"{downstream_fees:.2f}",
+ )
+
+    _stats = mo.hstack([total_impact_stat, direct_impact_stat, downstream_impact_stat], widths="equal", gap=1)
+
+ # Impact breakdown pie chart
+ def make_impact_pie(dataframe, title=""):
+ fig = px.pie(
+ dataframe,
+ values='Value',
+ names='Category',
+ title=f"{title}"
+ )
+ fig.update_layout(
+ paper_bgcolor="white",
+ plot_bgcolor="white",
+ font=dict(size=12, color="#111"),
+ title=dict(text=title, x=0, xanchor="left"),
+ margin=dict(t=50, l=20, r=20, b=20),
+ legend_title="",
+ hovermode="x"
+ )
+ return fig
+
+    _fig = make_impact_pie(df_impact, "Chainlink Impact Breakdown")
+
+    mo.vstack([
+        _header,
+        _stats,
+        mo.ui.plotly(_fig),
+        mo.md("""
+        **Summary**: This dashboard provides a comprehensive view of Chainlink's impact on the Superchain ecosystem across multiple dimensions.
+        The analysis combines direct economic contributions, downstream attribution effects, and estimated risk reduction value to quantify
+        the overall value created by Chainlink's oracle infrastructure.
+        """)
+    ])
+ return
+
+
+@app.cell
+def _():
+ return
+
+
+if __name__ == "__main__":
+ app.run()