diff --git a/README.md b/README.md new file mode 100644 index 0000000..6a670ce --- /dev/null +++ b/README.md @@ -0,0 +1,97 @@ +# Admin Analytics + +University of Delaware administrative cost benchmarking using public data (IRS 990, IPEDS, BLS CPI-U). Ingests data into a local DuckDB database and serves an interactive Dash dashboard for analysis. + +## Prerequisites + +- Python 3.11+ +- [uv](https://docs.astral.sh/uv/) package manager +- Playwright browsers (only needed for the `scrape` command) + +## Setup + +```bash +# Clone and install +git clone <repository-url> +cd AdminAnalytics +uv sync + +# Install Playwright browsers (optional, only for scraping) +uv run playwright install chromium +``` + +## Ingesting Data + +Load data from public sources into the local DuckDB database (`data/admin_analytics.duckdb`). + +```bash +# Ingest everything (IPEDS + IRS 990 + CPI + scraper) +uv run admin-analytics ingest all + +# Or ingest individual sources +uv run admin-analytics ingest ipeds --year-range 2005-2024 +uv run admin-analytics ingest irs990 --year-range 2019-2024 +uv run admin-analytics ingest cpi +uv run admin-analytics ingest scrape +``` + +Use `--force` on any command to re-download files that already exist locally. + +Downloaded files are stored in `data/raw/` (gitignored). + +## Launching the Dashboard + +```bash +uv run admin-analytics dashboard +``` + +Opens at [http://localhost:8050](http://localhost:8050). Use `--port` to change the port. 
+ +The dashboard has four tabs: + +- **Admin Cost Overview** -- admin cost ratios, expense breakdown by function, cost per student, admin-to-faculty ratio (IPEDS data, 2005-2024) +- **Executive Compensation** -- top earners from IRS 990 Schedule J, compensation trends by role, growth vs CPI-U (2017-2023) +- **Staffing & Enrollment** -- staff composition, student-to-staff ratios, management growth vs enrollment growth +- **Current Headcount** -- scraped UD staff directory data with overhead classification + +## Validating Data + +Check row counts, NULL rates, year coverage, and cross-source consistency: + +```bash +uv run admin-analytics validate +``` + +## Running Tests + +```bash +uv sync --group dev +uv run pytest +``` + +## Project Structure + +``` +src/admin_analytics/ + cli.py # CLI entry point (typer) + config.py # Constants (UD identifiers, URLs, paths) + db/ # DuckDB schema and connection + ipeds/ # IPEDS download, parsing, loading + irs990/ # IRS 990 XML download, parsing, title normalization + bls/ # BLS CPI-U download and loading + scraper/ # UD staff directory scraper and classifier + dashboard/ # Dash app, queries, page layouts + validation.py # Data validation queries +data/raw/ # Downloaded files (gitignored) +docs/data_dictionary.md # Schema documentation +tests/ # pytest test suite +``` + +## Data Sources + +| Source | What it provides | Years | +|--------|-----------------|-------| +| [IPEDS](https://nces.ed.gov/ipeds/) | Institutional directory, expenses by function, staffing, enrollment | 2005-2024 | +| [IRS 990 e-file](https://www.irs.gov/charities-non-profits/form-990-series-downloads) | UD Foundation filings, executive compensation (Schedule J) | 2017-2023 | +| [BLS CPI-U](https://www.bls.gov/cpi/) | Consumer Price Index for inflation adjustment | Full history | +| UD staff directories | Admin office headcounts and overhead classification | Current snapshot | diff --git a/docs/data_dictionary.md b/docs/data_dictionary.md new file mode 100644 
index 0000000..4aae435 --- /dev/null +++ b/docs/data_dictionary.md @@ -0,0 +1,173 @@ +# Data Dictionary + +Raw data layer for University of Delaware administrative analytics. All tables are prefixed `raw_` and loaded into DuckDB. + +## Tables + +### raw_institution + +**Source:** IPEDS HD (Institutional Characteristics) survey +**Granularity:** One row per institution per year +**Primary Key:** (unitid, year) + +| Column | Type | Description | +|--------|------|-------------| +| unitid | INTEGER | IPEDS institution identifier | +| year | INTEGER | Survey year | +| ein | VARCHAR | IRS Employer Identification Number | +| institution_name | VARCHAR | Institution name | +| city | VARCHAR | City | +| state | VARCHAR | State abbreviation | +| sector | INTEGER | IPEDS sector code (public/private/for-profit) | +| control | INTEGER | IPEDS control code (1=public, 2=private nonprofit, 3=private for-profit) | +| carnegie_class | INTEGER | Carnegie Classification code | +| enrollment_total | INTEGER | Total enrollment from HD survey | + +### raw_ipeds_finance + +**Source:** IPEDS F1A (GASB public) and F2 (FASB private) finance surveys +**Granularity:** One row per institution per year +**Primary Key:** (unitid, year) +**Note:** UD reports under FASB (F2) despite being public. The loader tries both F1A and F2. 
+ +| Column | Type | Description | +|--------|------|-------------| +| unitid | INTEGER | IPEDS institution identifier | +| year | INTEGER | Fiscal year | +| reporting_standard | VARCHAR | "GASB" or "FASB" | +| total_expenses | BIGINT | Total expenses | +| instruction_expenses | BIGINT | Instruction function expenses | +| research_expenses | BIGINT | Research function expenses | +| public_service_expenses | BIGINT | Public service function expenses | +| academic_support_expenses | BIGINT | Academic support function expenses | +| student_services_expenses | BIGINT | Student services function expenses | +| institutional_support_expenses | BIGINT | Institutional support (admin) expenses | +| auxiliary_expenses | BIGINT | Auxiliary enterprises expenses | +| hospital_expenses | BIGINT | Hospital services expenses | +| other_expenses | BIGINT | Other expenses | +| salaries_wages | BIGINT | Total salaries and wages | +| benefits | BIGINT | Total employee benefits | + +### raw_ipeds_staff + +**Source:** IPEDS S (Fall Staff) survey, occupational categories +**Granularity:** One row per institution per year +**Primary Key:** (unitid, year) + +| Column | Type | Description | +|--------|------|-------------| +| unitid | INTEGER | IPEDS institution identifier | +| year | INTEGER | Survey year | +| total_staff | INTEGER | Total staff headcount (OCCUPCAT 100) | +| faculty_total | INTEGER | Faculty headcount (OCCUPCAT 250) | +| management_total | INTEGER | Management headcount (OCCUPCAT 200) | + +### raw_ipeds_enrollment + +**Source:** IPEDS EF (Fall Enrollment) survey +**Granularity:** One row per institution per year +**Primary Key:** (unitid, year) + +| Column | Type | Description | +|--------|------|-------------| +| unitid | INTEGER | IPEDS institution identifier | +| year | INTEGER | Survey year | +| total_enrollment | INTEGER | Total student headcount (EFALEVEL=1 grand total) | + +### raw_990_filing + +**Source:** IRS 990 e-file XML — filing header +**Granularity:** One row 
per filing +**Primary Key:** object_id + +| Column | Type | Description | +|--------|------|-------------| +| object_id | VARCHAR | IRS e-file object ID (unique filing identifier) | +| ein | VARCHAR | Employer Identification Number | +| tax_year | INTEGER | Tax year of the filing | +| organization_name | VARCHAR | Organization name from the filing | +| return_type | VARCHAR | Return type (990, 990PF) | +| filing_date | DATE | Date the return was filed | +| total_revenue | BIGINT | Total revenue (Part I) | +| total_expenses | BIGINT | Total functional expenses (Part IX) | +| total_assets | BIGINT | Total assets (Balance Sheet) | + +### raw_990_part_vii + +**Source:** IRS 990 Part VII Section A — Officers, Directors, Key Employees +**Granularity:** One row per person per filing +**Primary Key:** id (auto-increment) + +| Column | Type | Description | +|--------|------|-------------| +| id | INTEGER | Auto-increment surrogate key | +| object_id | VARCHAR | FK to raw_990_filing | +| ein | VARCHAR | Employer Identification Number | +| tax_year | INTEGER | Tax year | +| person_name | VARCHAR | Name of officer/director/key employee | +| title | VARCHAR | Title or position | +| avg_hours_per_week | DOUBLE | Average hours per week devoted to position | +| reportable_comp_from_org | BIGINT | Reportable compensation from the organization | +| reportable_comp_from_related | BIGINT | Reportable compensation from related organizations | +| other_compensation | BIGINT | Other compensation | + +### raw_990_schedule_j + +**Source:** IRS 990 Schedule J — Compensation Information for Officers, Directors, etc. 
+**Granularity:** One row per person per filing +**Primary Key:** id (auto-increment) + +| Column | Type | Description | +|--------|------|-------------| +| id | INTEGER | Auto-increment surrogate key | +| object_id | VARCHAR | FK to raw_990_filing | +| ein | VARCHAR | Employer Identification Number | +| tax_year | INTEGER | Tax year | +| person_name | VARCHAR | Name of individual | +| title | VARCHAR | Title or position | +| base_compensation | BIGINT | Base compensation | +| bonus_compensation | BIGINT | Bonus and incentive compensation | +| other_compensation | BIGINT | Other reportable compensation | +| deferred_compensation | BIGINT | Deferred compensation | +| nontaxable_benefits | BIGINT | Nontaxable benefits | +| total_compensation | BIGINT | Total (sum of all compensation components) | +| compensation_from_related | BIGINT | Compensation from related organizations | + +### raw_cpi_u + +**Source:** BLS CPI-U flat file (cu.data.0.Current) +**Granularity:** One row per month +**Primary Key:** (year, month) +**Filter:** Series CUUR0000SA0 — All Urban Consumers, U.S. 
City Average, All Items, Not Seasonally Adjusted + +| Column | Type | Description | +|--------|------|-------------| +| year | INTEGER | Calendar year | +| month | INTEGER | Month (1-12) | +| value | DOUBLE | CPI-U index value (base period: 1982-84 = 100) | +| series_id | VARCHAR | BLS series identifier (always CUUR0000SA0) | + +### raw_admin_headcount + +**Source:** Web scraping of UD staff directory pages +**Granularity:** One row per staff member per scrape +**Primary Key:** id (auto-increment) + +| Column | Type | Description | +|--------|------|-------------| +| id | INTEGER | Auto-increment surrogate key | +| scrape_date | DATE | Date the page was scraped | +| unit | VARCHAR | Administrative unit (e.g., "Office of the President") | +| person_name | VARCHAR | Staff member name | +| title | VARCHAR | Job title | +| email | VARCHAR | Email address | +| category | VARCHAR | Classified category (LEADERSHIP, FINANCE, IT, etc.) | +| is_overhead | BOOLEAN | True = overhead, False = mission-aligned, NULL = debatable | + +## Cross-Source Relationships + +- **IPEDS tables** are linked by `unitid` (UD = 130943) +- **IRS 990 tables** are linked by `object_id` (filing) and `ein` (organization) +- **IPEDS → IRS 990:** The `ein` field in `raw_institution` links to `ein` in 990 tables. 
UD Foundation EINs: 516000297, 516017306 +- **CPI-U** is used for inflation adjustment — join on `year` (and optionally `month`) to any table with a year column +- **Admin headcount** links to IPEDS via institutional context (UD only in first iteration) diff --git a/phase1_plan.md b/phase1_plan.md index 4c37e30..fdac995 100644 --- a/phase1_plan.md +++ b/phase1_plan.md @@ -117,10 +117,25 @@ Sprint 3: BLS CPI-U (independent) │ + CLI + Validation ─────────────┤ ▼ Sprint 4: Stretch scraper ────── Phase 1 Complete + │ +Phase 2: SKIPPED (folded into dashboard queries) + │ +Sprint 5: Dash dashboard ──────── Phase 3 Prototype ``` --- +## Phase 2 Skip Decision (March 2026) + +Phase 2 (data pipeline & normalization) was **skipped** for the initial prototype. All derived metrics — admin cost ratios, CPI adjustments, compensation growth indices — are computed directly in dashboard SQL queries rather than a separate normalized schema. + +**Rationale:** With a single institution (UD) and a populated DuckDB, the query layer is sufficient for a local prototype. 
A proper Phase 2 with dbt transformations and a unified analytical schema should be built before: +- Expanding to multi-institution comparisons (Phase 4) +- Moving to a production React dashboard (Phase 3 full build) +- Adding complex cross-source joins that benefit from materialized views + +--- + ## Technical Decisions | Decision | Choice | Rationale | diff --git a/pyproject.toml b/pyproject.toml index 8828f53..0718781 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,10 @@ dependencies = [ "xlrd>=2.0", "pyarrow>=17.0", "lxml>=5.0", + "beautifulsoup4>=4.12", + "playwright>=1.40", + "dash>=2.17", + "plotly>=5.22", ] [project.scripts] diff --git a/src/admin_analytics/bls/download.py b/src/admin_analytics/bls/download.py new file mode 100644 index 0000000..c4affbb --- /dev/null +++ b/src/admin_analytics/bls/download.py @@ -0,0 +1,32 @@ +"""BLS CPI-U flat-file download.""" + +from pathlib import Path + +import httpx + +from admin_analytics.config import BLS_CPI_URL, BLS_DATA_DIR + +CPI_FILENAME = "cu.data.0.Current" + + +def download_cpi_file(force: bool = False) -> Path: + """Download the BLS CPI-U flat file. + + Returns the path to the downloaded file. 
+ """ + BLS_DATA_DIR.mkdir(parents=True, exist_ok=True) + dest = BLS_DATA_DIR / CPI_FILENAME + + if not force and dest.exists(): + return dest + + with httpx.Client( + follow_redirects=True, + timeout=120.0, + headers={"User-Agent": "admin-analytics/0.1 (research)"}, + ) as client: + resp = client.get(BLS_CPI_URL) + resp.raise_for_status() + + dest.write_bytes(resp.content) + return dest diff --git a/src/admin_analytics/bls/loader.py b/src/admin_analytics/bls/loader.py new file mode 100644 index 0000000..441a97a --- /dev/null +++ b/src/admin_analytics/bls/loader.py @@ -0,0 +1,56 @@ +"""Parse and load BLS CPI-U data into DuckDB.""" + +from pathlib import Path + +import duckdb +import polars as pl + + +def load_cpi(conn: duckdb.DuckDBPyConnection, file_path: Path) -> int: + """Parse the BLS CPI-U flat file and load into raw_cpi_u. + + Filters to series CUUR0000SA0 (All Urban Consumers, US City Average, + All Items, Not Seasonally Adjusted) and monthly periods M01-M12. + + Returns number of rows loaded. 
+ """ + df = pl.read_csv( + file_path, + separator="\t", + infer_schema_length=0, # read all as strings first + ) + + # BLS pads fields with whitespace — strip all string columns + df = df.with_columns( + pl.col(col).str.strip_chars() for col in df.columns + ) + + # Normalize column names (BLS has trailing spaces in headers too) + df = df.rename({col: col.strip() for col in df.columns}) + + # Filter to CPI-U series and monthly periods only + df = df.filter( + (pl.col("series_id") == "CUUR0000SA0") + & (pl.col("period").str.starts_with("M")) + & (pl.col("period") != "M13") + ) + + # Extract month from period (M01 -> 1, M12 -> 12) + df = df.with_columns( + pl.col("year").cast(pl.Int64).alias("year"), + pl.col("period").str.slice(1).cast(pl.Int64).alias("month"), + pl.col("value").cast(pl.Float64).alias("value"), + pl.lit("CUUR0000SA0").alias("series_id"), + ).select("year", "month", "value", "series_id") + + # Idempotent: delete all existing rows then insert + conn.execute("DELETE FROM raw_cpi_u") + + rows = df.to_dicts() + for row in rows: + conn.execute( + "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (?, ?, ?, ?)", + [row["year"], row["month"], row["value"], row["series_id"]], + ) + + return len(rows) diff --git a/src/admin_analytics/cli.py b/src/admin_analytics/cli.py index c59b70b..8186776 100644 --- a/src/admin_analytics/cli.py +++ b/src/admin_analytics/cli.py @@ -107,10 +107,83 @@ def irs990( @ingest_app.command() -def cpi() -> None: - """Ingest BLS CPI-U data. 
(Not yet implemented.)""" - typer.echo("CPI-U ingestion is not yet implemented (Sprint 3).") - raise typer.Exit(1) +def cpi( + force: Annotated[ + bool, typer.Option("--force", help="Re-download even if file exists") + ] = False, +) -> None: + """Ingest BLS CPI-U data.""" + conn = get_connection() + ensure_schema(conn) + + from admin_analytics.bls.download import download_cpi_file + from admin_analytics.bls.loader import load_cpi + + typer.echo("Downloading BLS CPI-U data...") + file_path = download_cpi_file(force=force) + + typer.echo("Loading CPI-U data into database...") + count = load_cpi(conn, file_path) + typer.echo(f"CPI-U ingestion complete: {count} monthly observations loaded.") + conn.close() + + +@ingest_app.command() +def scrape() -> None: + """Scrape UD staff directory pages for admin headcounts.""" + conn = get_connection() + ensure_schema(conn) + + from admin_analytics.scraper.directory import scrape_all + from admin_analytics.scraper.loader import load_scrape + from admin_analytics.scraper.classify import OVERHEAD_CATEGORIES, NON_OVERHEAD_CATEGORIES + + typer.echo("Scraping UD staff directory pages...") + entries = scrape_all() + + typer.echo("Loading scraped data into database...") + count = load_scrape(conn, entries) + + # Summary by unit and category + typer.echo(f"\nLoaded {count} staff entries.\n") + unit_counts: dict[str, dict[str, int]] = {} + for e in entries: + unit_counts.setdefault(e.unit, {}) + unit_counts[e.unit][e.category] = unit_counts[e.unit].get(e.category, 0) + 1 + + for unit, cats in sorted(unit_counts.items()): + total = sum(cats.values()) + overhead = sum(v for k, v in cats.items() if k in OVERHEAD_CATEGORIES) + typer.echo(f" {unit}: {total} staff ({overhead} overhead)") + for cat, n in sorted(cats.items(), key=lambda x: -x[1]): + typer.echo(f" {cat}: {n}") + + conn.close() + + +@app.command() +def dashboard( + port: Annotated[int, typer.Option(help="Port to serve on")] = 8050, + debug: Annotated[bool, typer.Option(help="Enable 
Dash debug mode")] = True, +) -> None: + """Launch the analytics dashboard.""" + from admin_analytics.dashboard.app import create_app + + dash_app = create_app() + typer.echo(f"Starting dashboard at http://localhost:{port}/") + dash_app.run(debug=debug, port=port) + + +@app.command() +def validate() -> None: + """Run data validation checks and print a report.""" + conn = get_connection() + ensure_schema(conn) + + from admin_analytics.validation import format_report + + typer.echo(format_report(conn)) + conn.close() @ingest_app.command(name="all") @@ -124,3 +197,6 @@ def ingest_all( ) -> None: """Ingest all data sources.""" ipeds(year_range=year_range, component="all", force=force) + irs990(year_range=year_range, force=force) + cpi(force=force) + scrape() diff --git a/src/admin_analytics/config.py b/src/admin_analytics/config.py index 62f53e9..16cb959 100644 --- a/src/admin_analytics/config.py +++ b/src/admin_analytics/config.py @@ -14,6 +14,10 @@ IPEDS_DATA_DIR = DATA_DIR / "ipeds" DEFAULT_YEAR_RANGE = range(2005, 2025) # 2005-2024 inclusive +# BLS CPI-U +BLS_CPI_URL = "https://download.bls.gov/pub/time.series/cu/cu.data.0.Current" +BLS_DATA_DIR = DATA_DIR / "bls" + # IRS 990 UD_EINS = [516000297, 516017306] # UD + UD Research Foundation IRS990_BASE_URL = "https://apps.irs.gov/pub/epostcard/990/xml" diff --git a/src/admin_analytics/dashboard/__init__.py b/src/admin_analytics/dashboard/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/admin_analytics/dashboard/app.py b/src/admin_analytics/dashboard/app.py new file mode 100644 index 0000000..6a0a585 --- /dev/null +++ b/src/admin_analytics/dashboard/app.py @@ -0,0 +1,53 @@ +"""Dash application factory.""" + +import dash +from dash import dcc, html, Input, Output + +from admin_analytics.db.connection import get_connection +from admin_analytics.db.schema import ensure_schema +from admin_analytics.dashboard.pages import overview, compensation, staffing, headcount + + +def create_app() -> dash.Dash: 
+ """Create and configure the Dash application.""" + app = dash.Dash(__name__, suppress_callback_exceptions=True) + conn = get_connection() + ensure_schema(conn) + + app.layout = html.Div( + [ + html.H1( + "University of Delaware — Administrative Analytics", + style={"textAlign": "center", "padding": "20px", "color": "#00539F"}, + ), + dcc.Tabs( + id="tabs", + value="overview", + children=[ + dcc.Tab(label="Admin Cost Overview", value="overview"), + dcc.Tab(label="Executive Compensation", value="compensation"), + dcc.Tab(label="Staffing & Enrollment", value="staffing"), + dcc.Tab(label="Current Headcount", value="headcount"), + ], + style={"marginBottom": "20px"}, + ), + html.Div(id="tab-content", style={"padding": "0 20px 20px 20px"}), + ], + style={"fontFamily": "system-ui, -apple-system, sans-serif", "maxWidth": "1400px", "margin": "0 auto"}, + ) + + @app.callback(Output("tab-content", "children"), Input("tabs", "value")) + def render_tab(tab: str): + if tab == "overview": + return overview.layout(conn) + elif tab == "compensation": + return compensation.layout(conn) + elif tab == "staffing": + return staffing.layout(conn) + elif tab == "headcount": + return headcount.layout(conn) + return html.Div("Unknown tab") + + compensation.register_callbacks(app, conn) + + return app diff --git a/src/admin_analytics/dashboard/pages/__init__.py b/src/admin_analytics/dashboard/pages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/admin_analytics/dashboard/pages/compensation.py b/src/admin_analytics/dashboard/pages/compensation.py new file mode 100644 index 0000000..45506dc --- /dev/null +++ b/src/admin_analytics/dashboard/pages/compensation.py @@ -0,0 +1,162 @@ +"""Page 2: Executive Compensation.""" + +import dash +import duckdb +from dash import html, dcc, Input, Output, dash_table +import plotly.express as px +import plotly.graph_objects as go + +from admin_analytics.dashboard.queries import ( + query_top_earners, + query_comp_by_role, + 
query_comp_vs_cpi, +) + +_NO_DATA = html.Div( + "No IRS 990 data loaded. Run: admin-analytics ingest irs990", + style={"textAlign": "center", "padding": "40px", "color": "#888"}, +) + +# Roles to highlight in trend chart +_KEY_ROLES = ["PRESIDENT", "PROVOST", "VP_FINANCE", "VP_RESEARCH", "VP_ADVANCEMENT", "CFO"] + + +def layout(conn: duckdb.DuckDBPyConnection): + all_earners = query_top_earners(conn) + if all_earners.height == 0: + return _NO_DATA + + years = sorted(all_earners["tax_year"].unique().to_list()) + year_options = [{"label": "All Years", "value": "all"}] + [ + {"label": str(y), "value": y} for y in years + ] + + # Compensation by role trend + role_df = query_comp_by_role(conn) + role_fig = go.Figure() + if role_df.height > 0: + role_pd = role_df.to_pandas() + for role in _KEY_ROLES: + subset = role_pd[role_pd["canonical_role"] == role] + if len(subset) > 0: + role_fig.add_trace(go.Scatter( + x=subset["tax_year"], + y=subset["total_compensation"], + mode="lines+markers", + name=role.replace("_", " ").title(), + )) + role_fig.update_layout( + title="Compensation Trends by Role", + xaxis_title="Tax Year", yaxis_title="Total Compensation ($)", + template="plotly_white", height=420, + ) + + # Comp vs CPI indexed + cpi_df = query_comp_vs_cpi(conn) + cpi_fig = go.Figure() + if cpi_df.height > 0: + cpi_pd = cpi_df.to_pandas() + cpi_fig.add_trace(go.Scatter( + x=cpi_pd["year"], y=cpi_pd["comp_index"], + mode="lines+markers", name="Top Compensation", + line={"color": "#00539F"}, + )) + cpi_fig.add_trace(go.Scatter( + x=cpi_pd["year"], y=cpi_pd["cpi_index"], + mode="lines+markers", name="CPI-U", + line={"color": "#FFD200", "dash": "dash"}, + )) + cpi_fig.update_layout( + title="Top Compensation vs CPI-U (Indexed, Base Year = 100)", + xaxis_title="Year", yaxis_title="Index", + template="plotly_white", height=380, + ) + + return html.Div([ + html.Div( + [ + html.Label("Filter by Tax Year: ", style={"fontWeight": "bold"}), + dcc.Dropdown( + id="comp-year-dropdown", + 
options=year_options, + value="all", + style={"width": "200px", "display": "inline-block"}, + ), + ], + style={"marginBottom": "16px"}, + ), + dash_table.DataTable( + id="comp-table", + columns=[ + {"name": "Year", "id": "tax_year"}, + {"name": "Name", "id": "person_name"}, + {"name": "Title", "id": "title"}, + {"name": "Role", "id": "canonical_role"}, + {"name": "Base", "id": "base_compensation", "type": "numeric", + "format": dash_table.Format.Format().group(True)}, + {"name": "Bonus", "id": "bonus_compensation", "type": "numeric", + "format": dash_table.Format.Format().group(True)}, + {"name": "Total", "id": "total_compensation", "type": "numeric", + "format": dash_table.Format.Format().group(True)}, + ], + data=all_earners.to_pandas().to_dict("records"), + page_size=15, + sort_action="native", + filter_action="native", + style_table={"overflowX": "auto"}, + style_cell={"textAlign": "left", "padding": "8px", "fontSize": "13px"}, + style_header={"fontWeight": "bold", "backgroundColor": "#f0f0f0"}, + ), + html.Div( + [ + html.Div(dcc.Graph(id="comp-breakdown-chart"), style={"flex": "1"}), + html.Div(dcc.Graph(figure=cpi_fig), style={"flex": "1"}), + ], + style={"display": "flex", "gap": "16px", "marginTop": "16px"}, + ), + dcc.Graph(figure=role_fig), + ]) + + +def register_callbacks(app: dash.Dash, conn: duckdb.DuckDBPyConnection) -> None: + """Register interactive callbacks for the compensation page.""" + + @app.callback( + [Output("comp-table", "data"), Output("comp-breakdown-chart", "figure")], + Input("comp-year-dropdown", "value"), + ) + def update_compensation(year_value): + year = None if year_value == "all" else int(year_value) + earners = query_top_earners(conn, year=year) + + # Table data + table_data = earners.to_pandas().to_dict("records") if earners.height > 0 else [] + + # Breakdown chart — stacked bar of comp components + breakdown_fig = go.Figure() + if earners.height > 0: + ep = earners.to_pandas().head(10) # top 10 by total comp + short_names = 
[n.split(",")[0][:20] if "," in n else n.split()[-1][:20] + for n in ep["person_name"]] + for comp_type, label, color in [ + ("base_compensation", "Base", "#00539F"), + ("bonus_compensation", "Bonus", "#FFD200"), + ("deferred_compensation", "Deferred", "#7FB069"), + ("nontaxable_benefits", "Benefits", "#E07A5F"), + ("other_compensation", "Other", "#999"), + ]: + if comp_type in ep.columns: + breakdown_fig.add_trace(go.Bar( + x=short_names, y=ep[comp_type], + name=label, marker_color=color, + )) + breakdown_fig.update_layout(barmode="stack") + + title_suffix = f" ({year})" if year else " (All Years)" + breakdown_fig.update_layout( + title=f"Compensation Breakdown — Top 10{title_suffix}", + xaxis_title="", yaxis_title="$", + template="plotly_white", height=380, + ) + + return table_data, breakdown_fig diff --git a/src/admin_analytics/dashboard/pages/headcount.py b/src/admin_analytics/dashboard/pages/headcount.py new file mode 100644 index 0000000..b8ae0a4 --- /dev/null +++ b/src/admin_analytics/dashboard/pages/headcount.py @@ -0,0 +1,118 @@ +"""Page 4: Current Admin Headcount (from scraper).""" + +import duckdb +from dash import html, dcc, dash_table +import plotly.express as px +import plotly.graph_objects as go + +from admin_analytics.dashboard.queries import ( + query_admin_headcount, + query_headcount_summary, +) + +_NO_DATA = html.Div( + "No headcount data loaded. 
Run: admin-analytics ingest scrape", + style={"textAlign": "center", "padding": "40px", "color": "#888"}, +) + + +def _kpi_card(title: str, value: str) -> html.Div: + return html.Div( + [ + html.H4(title, style={"margin": "0", "color": "#666", "fontSize": "14px"}), + html.H2(value, style={"margin": "5px 0", "color": "#00539F"}), + ], + style={ + "flex": "1", + "padding": "20px", + "backgroundColor": "#f8f9fa", + "borderRadius": "8px", + "textAlign": "center", + "margin": "0 8px", + }, + ) + + +def layout(conn: duckdb.DuckDBPyConnection): + detail_df = query_admin_headcount(conn) + if detail_df.height == 0: + return _NO_DATA + + summary_df = query_headcount_summary(conn) + detail_pd = detail_df.to_pandas() + summary_pd = summary_df.to_pandas() + + total = len(detail_pd) + overhead_count = int(detail_pd["is_overhead"].sum()) if "is_overhead" in detail_pd.columns else 0 + overhead_pct = round(overhead_count * 100 / total, 1) if total > 0 else 0 + + # KPI cards + kpi_row = html.Div( + [ + _kpi_card("Total Staff Scraped", str(total)), + _kpi_card("Overhead Staff", str(overhead_count)), + _kpi_card("Overhead %", f"{overhead_pct}%"), + ], + style={"display": "flex", "marginBottom": "24px"}, + ) + + # Staff by unit bar chart + unit_counts = summary_pd.groupby("unit")["count"].sum().reset_index().sort_values("count") + unit_fig = px.bar( + unit_counts, x="count", y="unit", orientation="h", + title="Staff Count by Unit", + labels={"count": "Staff", "unit": ""}, + color_discrete_sequence=["#00539F"], + ) + unit_fig.update_layout(template="plotly_white", height=max(300, len(unit_counts) * 30 + 100)) + + # Overhead pie + oh_data = detail_pd["is_overhead"].value_counts() + oh_labels = {True: "Overhead", False: "Non-Overhead"} + pie_fig = px.pie( + names=[oh_labels.get(k, "Debatable") for k in oh_data.index], + values=oh_data.values, + title="Overhead vs Non-Overhead", + color_discrete_sequence=["#E07A5F", "#7FB069", "#999"], + ) + pie_fig.update_layout(template="plotly_white", 
height=350) + + # Category distribution per unit + cat_fig = px.bar( + summary_pd, x="count", y="unit", color="category", orientation="h", + title="Category Distribution by Unit", + labels={"count": "Staff", "unit": "", "category": "Category"}, + ) + cat_fig.update_layout(template="plotly_white", height=max(300, len(unit_counts) * 30 + 100)) + + # Detail table + table = dash_table.DataTable( + columns=[ + {"name": "Unit", "id": "unit"}, + {"name": "Name", "id": "person_name"}, + {"name": "Title", "id": "title"}, + {"name": "Category", "id": "category"}, + {"name": "Overhead", "id": "is_overhead"}, + ], + data=detail_pd.to_dict("records"), + page_size=20, + sort_action="native", + filter_action="native", + style_table={"overflowX": "auto"}, + style_cell={"textAlign": "left", "padding": "8px", "fontSize": "13px"}, + style_header={"fontWeight": "bold", "backgroundColor": "#f0f0f0"}, + ) + + return html.Div([ + kpi_row, + html.Div( + [ + html.Div(dcc.Graph(figure=unit_fig), style={"flex": "1"}), + html.Div(dcc.Graph(figure=pie_fig), style={"flex": "1"}), + ], + style={"display": "flex", "gap": "16px"}, + ), + dcc.Graph(figure=cat_fig), + html.H3("Staff Directory Detail", style={"marginTop": "24px"}), + table, + ]) diff --git a/src/admin_analytics/dashboard/pages/overview.py b/src/admin_analytics/dashboard/pages/overview.py new file mode 100644 index 0000000..714a9f3 --- /dev/null +++ b/src/admin_analytics/dashboard/pages/overview.py @@ -0,0 +1,168 @@ +"""Page 1: Administrative Cost Overview.""" + +import duckdb +from dash import html, dcc +import plotly.express as px +import plotly.graph_objects as go + +from admin_analytics.dashboard.queries import ( + query_admin_cost_ratio, + query_expense_breakdown, + query_admin_per_student, + query_admin_faculty_ratio, +) + +_NO_DATA = html.Div( + "No data loaded. 
def layout(conn: duckdb.DuckDBPyConnection):
    """Build the Admin Cost Overview page.

    Runs the four overview queries against ``conn`` and renders:
    KPI cards, the admin-cost-ratio trend, a stacked expense-by-function
    area chart, and side-by-side per-student / admin-to-faculty trends.
    Returns the ``_NO_DATA`` placeholder when no finance rows exist.
    """
    # Admin cost ratio — this query gates the whole page: no finance data,
    # no page.
    ratio_df = query_admin_cost_ratio(conn)
    if ratio_df.height == 0:
        return _NO_DATA

    ratio_pd = ratio_df.to_pandas()
    # Rows are ORDER BY year, so iloc[-1] is the most recent year.
    latest = ratio_pd.iloc[-1]

    # Admin per student (may be empty if enrollment data isn't loaded)
    aps_df = query_admin_per_student(conn)
    aps_pd = aps_df.to_pandas() if aps_df.height > 0 else None
    latest_aps = aps_pd.iloc[-1] if aps_pd is not None else None

    # Admin-to-faculty ratio (may be empty if staff data isn't loaded)
    afr_df = query_admin_faculty_ratio(conn)
    afr_pd = afr_df.to_pandas() if afr_pd is not None else None if False else (afr_df.to_pandas() if afr_df.height > 0 else None)
    latest_afr = afr_pd.iloc[-1] if afr_pd is not None else None

    # KPI cards
    # NOTE(review): the int(...) casts assume the latest row's values are
    # non-NULL; a NULL from the query (e.g. missing enrollment) would arrive
    # as None/NaN and raise here — confirm the upstream queries guarantee
    # non-null latest rows.
    kpi_row = html.Div(
        [
            _kpi_card(
                "Admin Cost Ratio",
                f"{latest['admin_cost_pct']:.1f}%",
                f"Institutional Support / Total Expenses ({int(latest['year'])})",
            ),
            _kpi_card(
                "Admin Cost per Student",
                f"${int(latest_aps['admin_per_student']):,}" if latest_aps is not None else "N/A",
                f"CPI-adjusted: ${int(latest_aps['admin_per_student_cpi']):,}" if latest_aps is not None else "",
            ),
            _kpi_card(
                "Admin-to-Faculty Ratio",
                f"{latest_afr['admin_faculty_ratio']:.2f}" if latest_afr is not None else "N/A",
                f"Management / Faculty ({int(latest_afr['year'])})" if latest_afr is not None else "",
            ),
        ],
        style={"display": "flex", "marginBottom": "24px"},
    )

    # Admin cost ratio trend line
    ratio_fig = go.Figure()
    ratio_fig.add_trace(go.Scatter(
        x=ratio_pd["year"], y=ratio_pd["admin_cost_pct"],
        mode="lines+markers", name="Admin Cost %",
        line={"color": "#00539F"},
    ))
    ratio_fig.update_layout(
        title="Administrative Cost Ratio Over Time",
        xaxis_title="Year", yaxis_title="Institutional Support / Total Expenses (%)",
        template="plotly_white", height=400,
    )

    # Expense breakdown stacked area (values plotted in millions)
    breakdown_df = query_expense_breakdown(conn)
    if breakdown_df.height > 0:
        bk_pd = breakdown_df.to_pandas()
        # Every non-year column is an expense series.
        expense_cols = [c for c in bk_pd.columns if c != "year"]
        # Column-name → human-readable legend label; unknown columns fall
        # back to their raw name via labels.get(col, col).
        labels = {
            "instruction_expenses": "Instruction",
            "research_expenses": "Research",
            "public_service_expenses": "Public Service",
            "academic_support_expenses": "Academic Support",
            "student_services_expenses": "Student Services",
            "institutional_support_expenses": "Institutional Support",
            "auxiliary_expenses": "Auxiliary",
            "hospital_expenses": "Hospital",
            "other_expenses": "Other",
        }
        breakdown_fig = go.Figure()
        for col in expense_cols:
            breakdown_fig.add_trace(go.Scatter(
                x=bk_pd["year"], y=bk_pd[col] / 1e6,
                mode="lines", name=labels.get(col, col),
                stackgroup="one",
            ))
        breakdown_fig.update_layout(
            title="Expenses by Function (Millions $)",
            xaxis_title="Year", yaxis_title="Millions $",
            template="plotly_white", height=450,
        )
    else:
        # Empty figure keeps the page structure stable when no data exists.
        breakdown_fig = go.Figure()

    # Admin per student trend — nominal vs CPI-adjusted
    aps_fig = go.Figure()
    if aps_pd is not None:
        aps_fig.add_trace(go.Scatter(
            x=aps_pd["year"], y=aps_pd["admin_per_student"],
            mode="lines+markers", name="Nominal",
            line={"color": "#00539F"},
        ))
        aps_fig.add_trace(go.Scatter(
            x=aps_pd["year"], y=aps_pd["admin_per_student_cpi"],
            mode="lines+markers", name="CPI-Adjusted",
            line={"color": "#FFD200", "dash": "dash"},
        ))
        aps_fig.update_layout(
            title="Admin Cost per Student",
            xaxis_title="Year", yaxis_title="$ per Student",
            template="plotly_white", height=380,
        )

    # Admin-to-faculty ratio trend
    afr_fig = go.Figure()
    if afr_pd is not None:
        afr_fig.add_trace(go.Scatter(
            x=afr_pd["year"], y=afr_pd["admin_faculty_ratio"],
            mode="lines+markers", name="Ratio",
            line={"color": "#00539F"},
        ))
        afr_fig.update_layout(
            title="Admin-to-Faculty Ratio",
            xaxis_title="Year", yaxis_title="Management / Faculty",
            template="plotly_white", height=380,
        )

    # Page assembly: KPI row, two full-width charts, then the two smaller
    # trend charts side by side in a flex row.
    return html.Div([
        kpi_row,
        dcc.Graph(figure=ratio_fig),
        dcc.Graph(figure=breakdown_fig),
        html.Div(
            [
                html.Div(dcc.Graph(figure=aps_fig), style={"flex": "1"}),
                html.Div(dcc.Graph(figure=afr_fig), style={"flex": "1"}),
            ],
            style={"display": "flex", "gap": "16px"},
        ),
    ])
Run: admin-analytics ingest ipeds", + style={"textAlign": "center", "padding": "40px", "color": "#888"}, +) + + +def layout(conn: duckdb.DuckDBPyConnection): + staff_df = query_staff_composition(conn) + if staff_df.height == 0: + return _NO_DATA + + staff_pd = staff_df.to_pandas() + + # Staff composition stacked area + comp_fig = go.Figure() + for col, label, color in [ + ("faculty_total", "Faculty", "#00539F"), + ("management_total", "Management", "#E07A5F"), + ("other_staff", "Other Staff", "#7FB069"), + ]: + comp_fig.add_trace(go.Scatter( + x=staff_pd["year"], y=staff_pd[col], + mode="lines", name=label, + stackgroup="one", + line={"color": color}, + )) + comp_fig.update_layout( + title="Staff Composition Over Time", + xaxis_title="Year", yaxis_title="Headcount", + template="plotly_white", height=420, + ) + + # Student-to-staff ratios + ratio_df = query_student_staff_ratios(conn) + ratio_fig = go.Figure() + if ratio_df.height > 0: + ratio_pd = ratio_df.to_pandas() + ratio_fig.add_trace(go.Scatter( + x=ratio_pd["year"], y=ratio_pd["students_per_staff"], + mode="lines+markers", name="Students per Staff", + line={"color": "#00539F"}, + )) + ratio_fig.add_trace(go.Scatter( + x=ratio_pd["year"], y=ratio_pd["students_per_faculty"], + mode="lines+markers", name="Students per Faculty", + line={"color": "#FFD200"}, + )) + ratio_fig.update_layout( + title="Student-to-Staff Ratios", + xaxis_title="Year", yaxis_title="Ratio", + template="plotly_white", height=380, + ) + + # Growth index + growth_df = query_growth_index(conn) + growth_fig = go.Figure() + if growth_df.height > 0: + growth_pd = growth_df.to_pandas() + growth_fig.add_trace(go.Scatter( + x=growth_pd["year"], y=growth_pd["mgmt_index"], + mode="lines+markers", name="Management Growth", + line={"color": "#E07A5F"}, + )) + growth_fig.add_trace(go.Scatter( + x=growth_pd["year"], y=growth_pd["enrollment_index"], + mode="lines+markers", name="Enrollment Growth", + line={"color": "#00539F"}, + )) + 
def query_admin_cost_ratio(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """Admin cost ratio trend with CPI-adjusted values.

    One row per year for UD (filtered by ``UD_UNITID``), ordered by year.
    ``admin_cost_pct`` is institutional support as a percentage of total
    expenses (NULLIF guards against division by a zero total).

    CPI adjustment uses ``_CPI_CTE``: dollar values are scaled by
    (latest-year average CPI / that year's average CPI), i.e. expressed in
    latest-year dollars. Because the CPI join is a LEFT JOIN, years with no
    CPI row get NULL for the *_cpi_adjusted columns (division by NULL).
    """
    return conn.execute(f"""
        {_CPI_CTE}
        SELECT
            f.year,
            f.institutional_support_expenses,
            f.total_expenses,
            ROUND(f.institutional_support_expenses * 100.0
                / NULLIF(f.total_expenses, 0), 2) AS admin_cost_pct,
            ROUND(f.institutional_support_expenses
                * (SELECT avg_cpi FROM latest_cpi) / ac.avg_cpi, 0)
                AS inst_support_cpi_adjusted,
            ROUND(f.total_expenses
                * (SELECT avg_cpi FROM latest_cpi) / ac.avg_cpi, 0)
                AS total_expenses_cpi_adjusted
        FROM raw_ipeds_finance f
        LEFT JOIN annual_cpi ac ON ac.year = f.year
        WHERE f.unitid = ?
        ORDER BY f.year
    """, [UD_UNITID]).pl()
def query_comp_by_role(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """Compensation trends by canonical role across years.

    Returns one row per (tax_year, canonical_role): the highest-paid person
    holding that role in that year. Empty input yields an empty frame.
    """
    # The join to raw_990_filing selects nothing from it — it acts purely as
    # an existence filter, dropping Schedule J rows with no matching filing.
    df = conn.execute("""
        SELECT j.tax_year, j.person_name, j.title, j.total_compensation
        FROM raw_990_schedule_j j
        JOIN raw_990_filing f ON f.object_id = j.object_id
        WHERE j.total_compensation > 0
        ORDER BY j.tax_year, j.total_compensation DESC
    """).pl()

    if df.height == 0:
        return df

    # normalize_title (project helper) maps free-text titles to canonical
    # role names; map_elements applies it row-wise.
    df = df.with_columns(
        pl.col("title").map_elements(
            normalize_title, return_dtype=pl.Utf8
        ).alias("canonical_role")
    )

    # Keep highest-paid person per role per year.
    # NOTE(review): this relies on polars group_by().first() taking the first
    # row *in frame order* within each group, so the preceding descending
    # sort by total_compensation makes first() == max — confirm against the
    # pinned polars version.
    return (
        df.sort("total_compensation", descending=True)
        .group_by(["tax_year", "canonical_role"])
        .first()
        .sort(["tax_year", "canonical_role"])
    )
def query_staff_composition(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """Staff composition over time for UD.

    One row per year: total staff, faculty, management, and a derived
    ``other_staff`` remainder. COALESCE treats NULL faculty/management
    counts as 0 so the subtraction doesn't NULL out the remainder —
    but a NULL total_staff still yields NULL other_staff.
    """
    return conn.execute("""
        SELECT year, total_staff, faculty_total, management_total,
            total_staff - COALESCE(faculty_total, 0) - COALESCE(management_total, 0)
                AS other_staff
        FROM raw_ipeds_staff
        WHERE unitid = ?
        ORDER BY year
    """, [UD_UNITID]).pl()


def query_student_staff_ratios(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """Student-to-staff and student-to-faculty ratios per year for UD.

    Inner JOIN on enrollment means years present in only one of the staff /
    enrollment tables are dropped. NULLIF guards both divisions against
    zero denominators (yielding NULL instead of an error).
    """
    return conn.execute("""
        SELECT s.year, e.total_enrollment, s.total_staff, s.faculty_total,
            ROUND(e.total_enrollment * 1.0 / NULLIF(s.total_staff, 0), 1)
                AS students_per_staff,
            ROUND(e.total_enrollment * 1.0 / NULLIF(s.faculty_total, 0), 1)
                AS students_per_faculty
        FROM raw_ipeds_staff s
        JOIN raw_ipeds_enrollment e ON e.unitid = s.unitid AND e.year = s.year
        WHERE s.unitid = ?
        ORDER BY s.year
    """, [UD_UNITID]).pl()
def query_admin_headcount(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """All scraped admin headcount entries, ordered for display.

    NOTE(review): there is no scrape_date filter, so if the table ever holds
    more than one scrape snapshot, rows from all snapshots are mixed
    together — confirm whether callers expect only the latest snapshot.
    """
    return conn.execute("""
        SELECT unit, person_name, title, category, is_overhead, scrape_date
        FROM raw_admin_headcount
        ORDER BY unit, category, person_name
    """).pl()


def query_headcount_summary(conn: duckdb.DuckDBPyConnection) -> pl.DataFrame:
    """Headcount grouped by (unit, category, is_overhead).

    Same caveat as query_admin_headcount: counts span every scrape_date
    present in the table, not just the most recent one.
    """
    return conn.execute("""
        SELECT unit, category, is_overhead, COUNT(*) AS count
        FROM raw_admin_headcount
        GROUP BY unit, category, is_overhead
        ORDER BY unit, count DESC
    """).pl()
import zipfile from pathlib import Path @@ -57,11 +58,14 @@ def filter_index( def get_batch_list(year: int) -> list[str]: """Get the list of available TEOS XML batch ZIP IDs for a year. - Tries common naming patterns: {YEAR}_TEOS_XML_{MM}{L} where - MM is 01-12 and L is A-D. + Two naming conventions exist: + - 2021+: {YEAR}_TEOS_XML_{MM}{L}.zip (e.g. 2023_TEOS_XML_05A) + - 2017-2020: download990xml_{YEAR}_{N}.zip (e.g. download990xml_2020_1) + Also checks for _CT1 (correction/catchup) batches. """ batches = [] with httpx.Client(follow_redirects=True, timeout=30.0) as client: + # Modern pattern (2021+) for month in range(1, 13): for letter in "ABCD": batch_id = f"{year}_TEOS_XML_{month:02d}{letter}" @@ -72,9 +76,61 @@ def get_batch_list(year: int) -> list[str]: batches.append(batch_id) except httpx.HTTPError: continue + + # Legacy pattern (2017-2020) + for n in range(1, 20): + batch_id = f"download990xml_{year}_{n}" + url = f"{config.IRS990_BASE_URL}/{year}/{batch_id}.zip" + try: + resp = client.head(url) + if resp.status_code == 200: + batches.append(batch_id) + else: + break # sequential numbering, stop at first miss + except httpx.HTTPError: + break + + # Correction/catchup batch + ct_id = f"{year}_TEOS_XML_CT1" + url = f"{config.IRS990_BASE_URL}/{year}/{ct_id}.zip" + try: + resp = client.head(url) + if resp.status_code == 200: + batches.append(ct_id) + except httpx.HTTPError: + pass + return batches +def _extract_with_system_unzip( + zip_path: Path, target_name: str, dest: Path +) -> bool: + """Extract a file from a ZIP using the system 7z or unzip command.""" + dest.parent.mkdir(parents=True, exist_ok=True) + + with tempfile.TemporaryDirectory() as tmp_dir: + # Try 7z first (handles more compression methods), then unzip + for cmd in [ + ["7z", "x", str(zip_path), f"-o{tmp_dir}", f"*{target_name}", "-y"], + ["unzip", "-o", "-j", str(zip_path), f"*{target_name}", "-d", tmp_dir], + ]: + try: + result = subprocess.run( + cmd, capture_output=True, timeout=300 + ) + 
if result.returncode == 0: + extracted = list(Path(tmp_dir).rglob(target_name)) + if extracted: + import shutil + shutil.move(str(extracted[0]), str(dest)) + return True + except FileNotFoundError: + continue + + return False + + def _extract_xml_from_zip( zip_url: str, object_id: str, dest: Path ) -> bool: @@ -96,20 +152,27 @@ def _extract_xml_from_zip( for chunk in resp.iter_bytes(chunk_size=1024 * 1024): f.write(chunk) - with zipfile.ZipFile(tmp_path) as zf: - names = zf.namelist() - # Look for our target file (case-insensitive) - match = None - for name in names: - if name.lower() == target_name.lower(): - match = name - break - if match is None: - return False - dest.parent.mkdir(parents=True, exist_ok=True) - with zf.open(match) as src, open(dest, "wb") as dst: - dst.write(src.read()) - return True + try: + with zipfile.ZipFile(tmp_path) as zf: + names = zf.namelist() + # Look for our target file — may be in a subdirectory within the ZIP + # e.g. "2024_TEOS_XML_04A/202421029349301032_public.xml" + match = None + for name in names: + basename = name.rsplit("/", 1)[-1] if "/" in name else name + if basename.lower() == target_name.lower(): + match = name + break + if match is None: + return False + dest.parent.mkdir(parents=True, exist_ok=True) + with zf.open(match) as src, open(dest, "wb") as dst: + dst.write(src.read()) + return True + except NotImplementedError: + # Unsupported compression (e.g. Zstandard in newer ZIPs). + # Fall back to system unzip. + return _extract_with_system_unzip(tmp_path, target_name, dest) finally: tmp_path.unlink(missing_ok=True) @@ -136,8 +199,9 @@ def download_filing_xml( return dest if batch_id: - # We know which ZIP to look in - url = f"{config.IRS990_BASE_URL}/{year}/{batch_id}.zip" + # We know which ZIP to look in. Index may have lowercase but URLs need uppercase. 
+ batch_id_upper = batch_id.strip().upper() + url = f"{config.IRS990_BASE_URL}/{year}/{batch_id_upper}.zip" if _extract_xml_from_zip(url, object_id, dest): return dest return None diff --git a/src/admin_analytics/scraper/classify.py b/src/admin_analytics/scraper/classify.py new file mode 100644 index 0000000..99640df --- /dev/null +++ b/src/admin_analytics/scraper/classify.py @@ -0,0 +1,121 @@ +"""Classify staff by title into functional categories. + +Categories distinguish admin overhead from grant-funded, student-facing, +and technical roles. This is critical because IPEDS/IRE "staff" counts +lump everyone who isn't tenure-track faculty — including postdocs, +research scientists, and lab technicians who are soft-funded through +extramural research and are NOT administrative overhead. +""" + +import re + +# Order matters — first match wins. +CATEGORY_PATTERNS: list[tuple[str, re.Pattern]] = [ + # Leadership / structural overhead + ("LEADERSHIP", re.compile( + r"\b(dean|chief of staff|associate dean|assistant dean)\b", re.I)), + + # Faculty listed on staff page — not admin (must come before RESEARCH + # to catch "Adjunct Professor NIST" as faculty, not research) + ("FACULTY", re.compile( + r"\b(professor|lecturer|instructor|faculty|adjunct|affiliated)", re.I)), + + # Grants administration — debatable; supports extramural funding + ("GRANTS_ADMIN", re.compile( + r"\b(grants?\s+anal|pre-?award|post-?award|closeout\s+coord" + r"|sponsored\s+program|grants?\s+admin|grants?\s+manag|grants?\s+coord)", re.I)), + + # Research staff — soft-funded, NOT admin bloat + ("RESEARCH", re.compile( + r"\b(research\s+(?:associate|assistant|scientist|scholar|fellow)" + r"|postdoc|post-?doctoral|(?:associate\s+)?scientist\b)", re.I)), + + # Academic / student-facing support + ("ACADEMIC_SUPPORT", re.compile( + r"\b(academic\s+(?:advisor|analyst|program)|undergrad\w*\s+(?:recruit|advisor|affairs)" + r"|graduate\s+(?:services|advisor)|student\s+(?:develop|support|services)" + 
r"|program\s+(?:coordinator|manager))", re.I)), + + # Advancement / development — revenue-generating (fundraising) + ("ADVANCEMENT", re.compile( + r"\b(development|fundrais|advancement|alumni\s+relation|donor|giving)", re.I)), + + # Finance / procurement + ("FINANCE", re.compile( + r"\b(financial|fiscal|budget|procurement|business\s+(?:officer|admin)" + r"|sr\.?\s+business)", re.I)), + + # IT / computing + ("IT", re.compile( + r"\b(computing|systems?\s+(?:prog|admin)|it\s+|information\s+tech" + r"|support\s+specialist|service\s+desk|digital\s+tech)", re.I)), + + # Communications / marketing (must come before DIRECTOR) + ("COMMUNICATIONS", re.compile( + r"\b(communicat\w+|marketing|media\s+(?:specialist|coord|director)" + r"|web\s+(?:develop|design|content)|event\s+(?:coord|plan|manag))", re.I)), + + # Human resources + ("HR", re.compile( + r"\b(human\s+resource|hr\s+analyst|talent|workforce)", re.I)), + + # Facilities / space management + ("FACILITIES", re.compile( + r"\b(facilit|building|space\s+(?:plan|manag)|safety|engineer\w+\s+facilit)", re.I)), + + # Technical / lab operations — not admin bloat + ("TECHNICAL", re.compile( + r"\b(machinist|lab\s+(?:manager|coord|tech)|equipment|technician" + r"|instrument)", re.I)), + + # Administrative support + ("ADMIN_SUPPORT", re.compile( + r"\b(admin\w*\s+(?:assistant|specialist|support|secretary|coord)" + r"|secretary|receptionist|office\s+(?:manager|coord))", re.I)), + + # Director-level (catch remaining directors) + ("DIRECTOR", re.compile( + r"\b(director|associate\s+director|sr\.?\s+director)\b", re.I)), +] + +# Which categories count as administrative overhead +OVERHEAD_CATEGORIES = { + "LEADERSHIP", "FINANCE", "IT", "COMMUNICATIONS", "HR", + "FACILITIES", "ADMIN_SUPPORT", "DIRECTOR", +} + +# Debatable — could go either way depending on analysis +DEBATABLE_CATEGORIES = {"GRANTS_ADMIN"} + +# NOT overhead — these are mission-aligned or revenue-generating +NON_OVERHEAD_CATEGORIES = { + "RESEARCH", "ACADEMIC_SUPPORT", 
"ADVANCEMENT", "TECHNICAL", "FACULTY", +} + + +def classify_title(title: str | None) -> str: + """Classify a staff title into a functional category. + + Returns the category string, or "UNKNOWN" if no pattern matches. + """ + if not title or not title.strip(): + return "UNKNOWN" + + for category, pattern in CATEGORY_PATTERNS: + if pattern.search(title): + return category + + return "UNKNOWN" + + +def is_overhead(category: str) -> bool | None: + """Return True if the category is administrative overhead, + False if not, None if debatable. + """ + if category in OVERHEAD_CATEGORIES: + return True + if category in NON_OVERHEAD_CATEGORIES: + return False + if category in DEBATABLE_CATEGORIES: + return None + return None diff --git a/src/admin_analytics/scraper/directory.py b/src/admin_analytics/scraper/directory.py new file mode 100644 index 0000000..c2f2ecc --- /dev/null +++ b/src/admin_analytics/scraper/directory.py @@ -0,0 +1,292 @@ +"""Scrape UD staff directory pages for headcounts and title classification. + +Two rendering modes: + - Static (httpx): for pages that embed staff data in HTML source + - Dynamic (Playwright): for JS-rendered pages (Divi/WordPress AJAX) + +The scraper targets the Engineering management line: + Department staff → COE Central → Provost → President +""" + +from __future__ import annotations + +import re +from dataclasses import dataclass, field +from datetime import date +from pathlib import Path + +import httpx + +from admin_analytics.scraper.classify import classify_title + + +@dataclass +class StaffEntry: + name: str + title: str | None + email: str | None + category: str = "" + unit: str = "" + + def __post_init__(self): + if not self.category and self.title: + self.category = classify_title(self.title) + + +@dataclass +class DirectoryPage: + """Configuration for a single directory page to scrape.""" + unit: str # e.g. 
"COE Central", "ME", "Provost Office" + url: str + requires_js: bool = False # True = needs Playwright + + +# UD College of Engineering management line + departments +DIRECTORY_PAGES = [ + # COE Central admin (JS-rendered) + DirectoryPage("COE Central", "https://engr.udel.edu/our-people/directory/", requires_js=True), + # Engineering departments + DirectoryPage("ME", "https://me.udel.edu/people/staff/", requires_js=True), + DirectoryPage("CBE", "https://cbe.udel.edu/people/staff/", requires_js=False), + DirectoryPage("ECE", "https://www.ece.udel.edu/people/staff/", requires_js=False), + DirectoryPage("CIS", "https://www.cis.udel.edu/people/staff/", requires_js=False), + DirectoryPage("BME", "https://bme.udel.edu/people/staff/", requires_js=True), + DirectoryPage("CCEE", "https://www.ccee.udel.edu/people/staff/", requires_js=False), + DirectoryPage("MSE", "https://mseg.udel.edu/people/staff/", requires_js=True), + # Management line above Engineering + DirectoryPage("Provost Office", "https://provost.udel.edu/about/our-staff/", requires_js=False), + DirectoryPage("President Office", "https://www.udel.edu/about/leadership/", requires_js=False), +] + + +def _extract_from_rendered_text(text: str, unit: str) -> list[StaffEntry]: + """Extract staff entries from rendered page text. + + The JS-rendered Divi pages display staff as: + Name + Title + [Phone] + email@udel.edu + + We anchor on email addresses and look backwards for name/title. 
+ """ + lines = [line.strip() for line in text.split("\n") if line.strip()] + entries: list[StaffEntry] = [] + seen_emails: set[str] = set() + + for i, line in enumerate(lines): + emails = re.findall(r"[\w.-]+@udel\.edu", line) + for email in emails: + if email in seen_emails: + continue + # Skip generic/departmental emails + if any(k in email for k in ["-info", "dept", "help", "college", "engr@", "coe-"]): + continue + seen_emails.add(email) + + # Look backwards for name and title + name, title = _find_name_title(lines, i) + if name: + entries.append(StaffEntry( + name=name, title=title, email=email, unit=unit, + )) + + return entries + + +def _find_name_title(lines: list[str], email_idx: int) -> tuple[str | None, str | None]: + """Look backwards from an email line to find the person's name and title.""" + name = None + title = None + + # Title keywords that distinguish a title line from a name line + title_keywords = re.compile( + r"(director|manager|specialist|advisor|analyst|coordinator|assistant|associate" + r"|officer|chief|dean|admin|support|computing|systems|financial|grants?" + r"|machinist|lab |academic|program|human|facilit|communicat|development" + r"|procurement|sr\.|senior|recruit|budget|research|scientist|engineer" + r"|technician|professor|lecturer)", re.I, + ) + + for back in range(1, 6): + idx = email_idx - back + if idx < 0: + break + prev = lines[idx].strip() + + # Stop if we hit another email (previous person's entry) + if "@udel.edu" in prev: + break + + # Skip phone numbers + if re.match(r"^[\d\-\(\)\s]{7,}$", prev): + continue + + # Is this a title or a name? 
+ if title is None and title_keywords.search(prev): + title = prev + elif name is None and re.match(r"^[A-Z][a-z]", prev) and len(prev) < 50: + # Looks like a name (starts with capital, not too long) + if not title_keywords.search(prev): + name = prev + elif title is None: + # It has title keywords — it's actually a title + title = prev + else: + name = prev + + if name and title: + break + + return name, title + + +def _extract_from_html(html_text: str, unit: str) -> list[StaffEntry]: + """Extract staff from static HTML source using email anchoring.""" + # Strip HTML tags to get plain text + text = re.sub(r"<[^>]+>", " ", html_text) + text = re.sub(r"&\w+;", " ", text) + text = re.sub(r"\s+", " ", text).strip() + + entries: list[StaffEntry] = [] + seen_emails: set[str] = set() + + for m in re.finditer(r"[\w.-]+@udel\.edu", text): + email = m.group() + if email in seen_emails: + continue + if any(k in email for k in ["-info", "dept", "help", "college", "engr@", "coe-"]): + continue + seen_emails.add(email) + + # Get context before the email + start = max(0, m.start() - 300) + ctx = text[start:m.start()] + + # Try to extract name + title from context + name, title = _parse_context_for_name_title(ctx) + if name: + entries.append(StaffEntry( + name=name, title=title, email=email, unit=unit, + )) + + return entries + + +def _parse_context_for_name_title(ctx: str) -> tuple[str | None, str | None]: + """Parse a text context block (before an email) for name and title.""" + # Pattern: Name Title [Location] [Phone] + m = re.search( + r"([A-Z][a-z]+(?:\s[A-Z]\.?)?\s(?:Mc|De|Van|La)?[A-Z][a-z]+(?:-[A-Z][a-z]+)?)\s+" + r"([A-Za-z/,.& ]+?(?:I{1,3}|IV|V)?)\s+" + r"(?:\d+\s+\w|\d{3}-)", + ctx, + ) + if m: + return m.group(1).strip(), m.group(2).strip() + + # Fallback: just find the last proper name in the context + names = re.findall( + r"([A-Z][a-z]+(?:\s[A-Z]\.?)?\s(?:Mc|De|Van|La)?[A-Z][a-z]+(?:-[A-Z][a-z]+)?)", + ctx, + ) + if names: + return names[-1], None + + return None, 
def _extract_provost_staff(html_text: str) -> list[StaffEntry]:
    """Custom parser for the Provost office page which has a distinct structure.

    Two sections are parsed:
      1. Leadership: each <h2> holds a title; names live in <strong> tags
         and are paired with titles purely by document order.
      2. Administrative staff: remaining <strong> tags in "Title, Name" form.

    NOTE(review): the positional pairing in (1) assumes every kept <h2> has
    exactly one corresponding <strong> — an <h2> without one (or an extra
    <strong> before the section) would desync every subsequent pairing.
    Verify against the live page when results look shuffled.
    """
    from lxml import html as lxml_html

    tree = lxml_html.fromstring(html_text)
    entries: list[StaffEntry] = []

    # Leadership: h2 tags contain titles, strong tags contain names
    h2s = tree.xpath("//h2")
    strongs = tree.xpath("//strong")

    # Flatten strong tags to stripped text, dropping empties.
    strong_texts = [s.text_content().strip() for s in strongs if s.text_content().strip()]

    for h2 in h2s:
        title = h2.text_content().strip()
        # Skip empty headings and known section/navigation headings.
        if not title or title in ("Administrative Staff", "Staff", "Quick Links", "Recent News"):
            continue
        # The name follows the h2 in a nearby strong tag or paragraph.
        # Match by position — h2 title, then the next unmatched strong is the name.
        if strong_texts:
            name = strong_texts.pop(0)
            entries.append(StaffEntry(
                name=name, title=title, email=None, unit="Provost Office",
            ))

    # Administrative staff section: "Title, Name" format in the remaining strongs.
    for st in strong_texts:
        if "," in st:
            # split(",", 1) keeps any further commas inside the name part.
            parts = st.split(",", 1)
            title = parts[0].strip()
            name = parts[1].strip()
            entries.append(StaffEntry(
                name=name, title=title, email=None, unit="Provost Office",
            ))

    return entries
def load_scrape(
    conn: duckdb.DuckDBPyConnection,
    entries: list[StaffEntry],
    scrape_date: date | None = None,
) -> int:
    """Load scraped staff entries into raw_admin_headcount.

    Clears previous data for the same scrape_date before inserting,
    making re-runs idempotent.

    Args:
        conn: open DuckDB connection.
        entries: scraped staff entries to persist.
        scrape_date: snapshot date for the rows; defaults to today.

    Returns:
        Number of rows inserted.
    """
    scrape_date = scrape_date or date.today()

    # Idempotency: wipe any prior load for this snapshot date first.
    conn.execute(
        "DELETE FROM raw_admin_headcount WHERE scrape_date = ?",
        [scrape_date],
    )

    if not entries:
        return 0

    # Derive the overhead flag from the classifier category for each row.
    rows = [
        [
            scrape_date,
            entry.unit,
            entry.name,
            entry.title,
            entry.email,
            entry.category,
            is_overhead(entry.category),
        ]
        for entry in entries
    ]

    # Single executemany batch instead of one execute() round-trip per row.
    conn.executemany(
        """INSERT INTO raw_admin_headcount
           (scrape_date, unit, person_name, title, email, category, is_overhead)
           VALUES (?, ?, ?, ?, ?, ?, ?)""",
        rows,
    )
    return len(rows)
def validate_row_counts(conn: duckdb.DuckDBPyConnection) -> dict[str, int]:
    """Return row count per raw table, keyed by table name."""
    counts: dict[str, int] = {}
    for table in TABLES:
        counts[table] = conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
    return counts


def validate_null_rates(conn: duckdb.DuckDBPyConnection) -> dict[str, dict[str, float]]:
    """Return NULL percentage for key columns per table.

    Only tables with rows are included. Returns {table: {column: pct}}.

    Uses a single aggregate query per table: COUNT(col) skips NULLs, so
    COUNT(*) - COUNT(col) is the NULL count. The original issued one query
    per column per table.
    """
    results: dict[str, dict[str, float]] = {}
    for table, columns in KEY_COLUMNS.items():
        null_exprs = ", ".join(f"COUNT(*) - COUNT({col})" for col in columns)
        row = conn.execute(f"SELECT COUNT(*), {null_exprs} FROM {table}").fetchone()
        total = row[0]
        if total == 0:
            continue  # empty tables have no meaningful NULL rate
        results[table] = {
            col: round(row[i + 1] * 100.0 / total, 1)
            for i, col in enumerate(columns)
        }
    return results


def validate_year_coverage(
    conn: duckdb.DuckDBPyConnection,
) -> dict[str, dict[str, Any]]:
    """Return year coverage info per table.

    Returns {table: {"years": [...], "gaps": [...]}}.

    NOTE(review): gaps are measured against DEFAULT_YEAR_RANGE for every
    table, including sources whose expected coverage is narrower (e.g. the
    990 tables) — confirm whether per-source expected ranges are wanted.
    """
    results: dict[str, dict[str, Any]] = {}
    expected = set(DEFAULT_YEAR_RANGE)

    for table, col in YEAR_COLUMN.items():
        rows = conn.execute(
            f"SELECT DISTINCT {col} FROM {table} ORDER BY {col}"
        ).fetchall()
        years = [r[0] for r in rows]
        if not years:
            results[table] = {"years": [], "gaps": sorted(expected)}
            continue
        present = set(years)
        results[table] = {"years": years, "gaps": sorted(expected - present)}

    return results


def validate_cross_source_consistency(
    conn: duckdb.DuckDBPyConnection,
) -> dict[str, Any]:
    """Check year overlap across IPEDS, IRS 990, and CPI sources.

    Returns per-source year lists plus the intersection; the intersection is
    empty unless at least two sources have data.
    """
    sources = {
        "ipeds_finance": ("raw_ipeds_finance", "year"),
        "irs990": ("raw_990_filing", "tax_year"),
        "cpi": ("raw_cpi_u", "year"),
    }

    year_sets: dict[str, set[int]] = {}
    for key, (table, col) in sources.items():
        rows = conn.execute(
            f"SELECT DISTINCT {col} FROM {table}"
        ).fetchall()
        year_sets[key] = {r[0] for r in rows}

    # Intersecting a single non-empty set with nothing would trivially return
    # all of its years, so require at least two populated sources.
    non_empty = {k: v for k, v in year_sets.items() if v}
    if len(non_empty) < 2:
        all_years: set[int] = set()
    else:
        all_years = set.intersection(*non_empty.values())

    return {
        "source_years": {k: sorted(v) for k, v in year_sets.items()},
        "years_in_all_sources": sorted(all_years),
    }


def format_report(conn: duckdb.DuckDBPyConnection) -> str:
    """Generate a full text validation report covering all four checks."""
    lines: list[str] = []

    # Row counts
    lines.append("=== Row Counts ===")
    counts = validate_row_counts(conn)
    for table, count in counts.items():
        lines.append(f"  {table:<30s} {count:>8d}")

    # NULL rates
    lines.append("\n=== NULL Rates (%) ===")
    nulls = validate_null_rates(conn)
    if not nulls:
        lines.append("  (no data loaded)")
    for table, rates in nulls.items():
        lines.append(f"  {table}:")
        for col, pct in rates.items():
            flag = "  !" if pct > 0 else ""
            lines.append(f"    {col:<35s} {pct:>5.1f}%{flag}")

    # Year coverage
    lines.append("\n=== Year Coverage ===")
    coverage = validate_year_coverage(conn)
    for table, info in coverage.items():
        years = info["years"]
        gaps = info["gaps"]
        if not years:
            lines.append(f"  {table:<30s} no data")
            continue
        yr_range = f"{min(years)}-{max(years)} ({len(years)} years)"
        lines.append(f"  {table:<30s} {yr_range}")
        if gaps:
            lines.append(f"    gaps: {gaps}")

    # Cross-source
    lines.append("\n=== Cross-Source Consistency ===")
    cs = validate_cross_source_consistency(conn)
    for source, years in cs["source_years"].items():
        if years:
            lines.append(f"  {source:<20s} {min(years)}-{max(years)} ({len(years)} years)")
        else:
            lines.append(f"  {source:<20s} no data")
    overlap = cs["years_in_all_sources"]
    if overlap:
        lines.append(f"  Years in all sources: {min(overlap)}-{max(overlap)} ({len(overlap)} years)")
    else:
        lines.append("  Years in all sources: none")

    return "\n".join(lines)


# --- tests/fixtures/cu_data_sample.tsv (tab-separated fixture; listed here for
# reference because this span of the diff also carried the fixture file) ---
# series_id       year    period  value    footnote_codes
# CUUR0000SA0     2022    M01     281.148
# CUUR0000SA0     2022    M02     283.716
# CUUR0000SA0     2022    M12     296.797
# CUUR0000SA0     2022    M13     292.655   <- annual average, excluded by loader
# CUUR0000SA0     2023    M01     299.170
# CUUR0000SA0     2023    M06     305.109
# CUSR0000SA0     2023    M01     298.432   <- wrong series, excluded
# CUUR0000SA0     2023    S01     302.108   <- semi-annual, excluded
# --- tests/test_bls.py ---
"""Tests for BLS CPI-U download and loader."""

import httpx
import respx

from admin_analytics.bls.download import download_cpi_file
from admin_analytics.bls.loader import load_cpi
from admin_analytics.config import BLS_CPI_URL


class TestDownload:
    @respx.mock
    def test_download_creates_file(self, tmp_path, monkeypatch):
        # Point the download directory at a temp dir and stub the HTTP call.
        monkeypatch.setattr("admin_analytics.bls.download.BLS_DATA_DIR", tmp_path)
        respx.get(BLS_CPI_URL).mock(
            return_value=httpx.Response(200, text="series_id\tyear\tperiod\tvalue\n")
        )
        result = download_cpi_file(force=True)
        assert result.exists()
        assert result.parent == tmp_path

    @respx.mock
    def test_download_skips_when_exists(self, tmp_path, monkeypatch):
        monkeypatch.setattr("admin_analytics.bls.download.BLS_DATA_DIR", tmp_path)
        cached_file = tmp_path / "cu.data.0.Current"
        cached_file.write_text("cached")
        # No route registered: any real HTTP request would fail the test.
        result = download_cpi_file(force=False)
        assert result.read_text() == "cached"

    @respx.mock
    def test_download_force_overwrites(self, tmp_path, monkeypatch):
        monkeypatch.setattr("admin_analytics.bls.download.BLS_DATA_DIR", tmp_path)
        stale_file = tmp_path / "cu.data.0.Current"
        stale_file.write_text("old")
        respx.get(BLS_CPI_URL).mock(
            return_value=httpx.Response(200, text="new data")
        )
        result = download_cpi_file(force=True)
        assert result.read_text() == "new data"


class TestLoader:
    def test_load_cpi(self, db_conn, fixtures_dir):
        sample = fixtures_dir / "cu_data_sample.tsv"
        inserted = load_cpi(db_conn, sample)
        # Fixture has 5 valid CUUR0000SA0 monthly rows (M01, M02, M12, M01, M06)
        # Excludes: M13 (annual avg), CUSR0000SA0 (wrong series), S01 (semi-annual)
        assert inserted == 5

    def test_load_cpi_correct_values(self, db_conn, fixtures_dir):
        sample = fixtures_dir / "cu_data_sample.tsv"
        load_cpi(db_conn, sample)
        rows = db_conn.execute(
            "SELECT year, month, value FROM raw_cpi_u ORDER BY year, month"
        ).fetchall()
        assert rows[0] == (2022, 1, 281.148)
        assert rows[-1] == (2023, 6, 305.109)

    def test_load_cpi_types(self, db_conn, fixtures_dir):
        sample = fixtures_dir / "cu_data_sample.tsv"
        load_cpi(db_conn, sample)
        first = db_conn.execute(
            "SELECT year, month, value, series_id FROM raw_cpi_u LIMIT 1"
        ).fetchone()
        assert isinstance(first[0], int)
        assert isinstance(first[1], int)
        assert isinstance(first[2], float)
        assert first[3] == "CUUR0000SA0"

    def test_load_cpi_idempotent(self, db_conn, fixtures_dir):
        sample = fixtures_dir / "cu_data_sample.tsv"
        load_cpi(db_conn, sample)
        load_cpi(db_conn, sample)  # re-run must replace, not duplicate
        total = db_conn.execute("SELECT COUNT(*) FROM raw_cpi_u").fetchone()[0]
        assert total == 5


# --- tests/test_dashboard_queries.py (part 1) ---
"""Tests for dashboard query functions."""

from admin_analytics.config import UD_UNITID
from admin_analytics.dashboard.queries import (
    query_admin_cost_ratio,
    query_expense_breakdown,
    query_admin_per_student,
    query_admin_faculty_ratio,
    query_top_earners,
    query_comp_by_role,
    query_comp_vs_cpi,
    query_staff_composition,
    query_student_staff_ratios,
    query_growth_index,
    query_admin_headcount,
    query_headcount_summary,
)


def _seed_ipeds(conn):
    """Insert minimal IPEDS data for 2 years."""
    for yr, inst_support, total_exp in [(2020, 100_000, 1_000_000), (2021, 120_000, 1_100_000)]:
        conn.execute(
            "INSERT INTO raw_ipeds_finance (unitid, year, institutional_support_expenses, total_expenses) VALUES (?, ?, ?, ?)",
            [UD_UNITID, yr, inst_support, total_exp],
        )
    for yr, headcount in [(2020, 20000), (2021, 21000)]:
        conn.execute(
            "INSERT INTO raw_ipeds_enrollment (unitid, year, total_enrollment) VALUES (?, ?, ?)",
            [UD_UNITID, yr, headcount],
        )
    for yr, staff_total, faculty, mgmt in [(2020, 3000, 1500, 500), (2021, 3100, 1550, 520)]:
        conn.execute(
            "INSERT INTO raw_ipeds_staff (unitid, year, total_staff, faculty_total, management_total) VALUES (?, ?, ?, ?, ?)",
            [UD_UNITID, yr, staff_total, faculty, mgmt],
        )


def _seed_cpi(conn):
    """Insert CPI data for 2020-2021 (same value for every month of a year)."""
    for yr, cpi_value in [(2020, 258.8), (2021, 270.9)]:
        for month in range(1, 13):
            conn.execute(
                "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (?, ?, ?, ?)",
                [yr, month, cpi_value, "CUUR0000SA0"],
            )


def _seed_990(conn):
    """Insert minimal 990 filing and Schedule J data."""
    conn.execute(
        "INSERT INTO raw_990_filing (object_id, ein, tax_year, organization_name, total_revenue, total_expenses) "
        "VALUES ('obj1', '516000297', 2021, 'UD Foundation', 50000000, 40000000)"
    )
    conn.execute(
        "INSERT INTO raw_990_schedule_j (object_id, ein, tax_year, person_name, title, "
        "base_compensation, bonus_compensation, other_compensation, deferred_compensation, "
        "nontaxable_benefits, total_compensation) "
        "VALUES ('obj1', '516000297', 2021, 'JOHN DOE', 'PRESIDENT', "
        "500000, 100000, 10000, 20000, 15000, 645000)"
    )
    conn.execute(
        "INSERT INTO raw_990_schedule_j (object_id, ein, tax_year, person_name, title, "
        "base_compensation, bonus_compensation, other_compensation, deferred_compensation, "
        "nontaxable_benefits, total_compensation) "
        "VALUES ('obj1', '516000297', 2021, 'JANE SMITH', 'PROVOST', "
        "400000, 50000, 5000, 15000, 10000, 480000)"
    )


class TestEmptyDatabase:
    def test_admin_cost_ratio_empty(self, db_conn):
        result = query_admin_cost_ratio(db_conn)
        assert result.height == 0

    def test_top_earners_empty(self, db_conn):
        result = query_top_earners(db_conn)
        assert result.height == 0

    def test_headcount_empty(self, db_conn):
        result = query_admin_headcount(db_conn)
        assert result.height == 0


class TestAdminCostRatio:
    def test_returns_correct_ratio(self, db_conn):
        _seed_ipeds(db_conn)
        _seed_cpi(db_conn)
        result = query_admin_cost_ratio(db_conn)
        assert result.height == 2
        # 2020: 100000 / 1000000 = 10%
        subset = result.filter(result["year"] == 2020)
        assert subset["admin_cost_pct"][0] == 10.0


class TestAdminPerStudent:
    def test_returns_per_student(self, db_conn):
        _seed_ipeds(db_conn)
        _seed_cpi(db_conn)
        result = query_admin_per_student(db_conn)
        assert result.height == 2
        # 2020: 100000 / 20000 = 5
        subset = result.filter(result["year"] == 2020)
        assert subset["admin_per_student"][0] == 5.0
class TestAdminFacultyRatio:
    def test_returns_ratio(self, db_conn):
        _seed_ipeds(db_conn)
        result = query_admin_faculty_ratio(db_conn)
        assert result.height == 2
        # 2020: 500 management / 1500 faculty = 0.333
        subset = result.filter(result["year"] == 2020)
        assert subset["admin_faculty_ratio"][0] == 0.333


class TestTopEarners:
    def test_returns_all(self, db_conn):
        _seed_990(db_conn)
        result = query_top_earners(db_conn)
        assert result.height == 2
        assert "canonical_role" in result.columns

    def test_filter_by_year(self, db_conn):
        _seed_990(db_conn)
        # Seeded year matches everything; a non-seeded year matches nothing.
        matched = query_top_earners(db_conn, year=2021)
        assert matched.height == 2
        unmatched = query_top_earners(db_conn, year=2019)
        assert unmatched.height == 0


class TestCompByRole:
    def test_groups_by_role(self, db_conn):
        _seed_990(db_conn)
        result = query_comp_by_role(db_conn)
        roles = result["canonical_role"].to_list()
        assert "PRESIDENT" in roles
        assert "PROVOST" in roles


class TestCompVsCpi:
    def test_returns_indexed(self, db_conn):
        _seed_990(db_conn)
        _seed_cpi(db_conn)
        result = query_comp_vs_cpi(db_conn)
        assert result.height > 0
        assert "comp_index" in result.columns
        assert "cpi_index" in result.columns


class TestStaffComposition:
    def test_computes_other(self, db_conn):
        _seed_ipeds(db_conn)
        result = query_staff_composition(db_conn)
        assert result.height == 2
        # 2020: 3000 - 1500 - 500 = 1000
        subset = result.filter(result["year"] == 2020)
        assert subset["other_staff"][0] == 1000


class TestGrowthIndex:
    def test_base_year_100(self, db_conn):
        _seed_ipeds(db_conn)
        result = query_growth_index(db_conn)
        assert result.height == 2
        base = result.filter(result["year"] == 2020)
        assert base["mgmt_index"][0] == 100.0
        assert base["enrollment_index"][0] == 100.0


# --- tests/test_scraper_classify.py ---
from admin_analytics.scraper.classify import classify_title, is_overhead


def test_grants_analyst():
    for job_title in (
        "Grants Analyst II",
        "Senior Grants Analyst",
        "Manager, Grant Administration, Pre-Award",
        "Closeout Coordinator, Research",
    ):
        assert classify_title(job_title) == "GRANTS_ADMIN"


def test_research_staff():
    for job_title in (
        "Research Associate",
        "Associate Scientist",
        "Computer Scientist NIST",
        "Postdoctoral Researcher",
    ):
        assert classify_title(job_title) == "RESEARCH"


def test_academic_support():
    for job_title in (
        "Undergraduate Academic Advisor",
        "Graduate Services Coordinator",
        "Academic Program Manager",
    ):
        assert classify_title(job_title) == "ACADEMIC_SUPPORT"


def test_admin_support():
    for job_title in ("Administrative Assistant IV", "Administrative Specialist"):
        assert classify_title(job_title) == "ADMIN_SUPPORT"


def test_it():
    for job_title in (
        "Computing Support Specialist II",
        "Systems Programmer IV",
        "Director of Computing Operations",
    ):
        assert classify_title(job_title) == "IT"


def test_finance():
    for job_title in (
        "Financial Specialist",
        "Director, Procurement & Financial Processing",
        "Sr. Business Officer",
    ):
        assert classify_title(job_title) == "FINANCE"


def test_leadership():
    for job_title in ("Dean", "Associate Dean for Academic Affairs", "Chief of Staff"):
        assert classify_title(job_title) == "LEADERSHIP"


def test_communications():
    for job_title in ("Communications Director", "Digital Communications Specialist"):
        assert classify_title(job_title) == "COMMUNICATIONS"


def test_technical():
    for job_title in ("Master Machinist", "Lab Manager", "Lab Coordinator II"):
        assert classify_title(job_title) == "TECHNICAL"


def test_faculty_not_admin():
    for job_title in ("Adjunct Professor NIST", "Affiliated Associate Professor"):
        assert classify_title(job_title) == "FACULTY"


def test_overhead_classification():
    # Identity comparisons: the function returns True/False/None, not truthy values.
    for category, expected in (
        ("LEADERSHIP", True),
        ("FINANCE", True),
        ("IT", True),
        ("RESEARCH", False),
        ("ACADEMIC_SUPPORT", False),
        ("TECHNICAL", False),
    ):
        assert is_overhead(category) is expected
    assert is_overhead("GRANTS_ADMIN") is None  # debatable


def test_unknown():
    assert classify_title("Football Coach") == "UNKNOWN"
    assert classify_title(None) == "UNKNOWN"
    assert classify_title("") == "UNKNOWN"
# --- tests/test_scraper_loader.py ---
from datetime import date

from admin_analytics.scraper.directory import StaffEntry
from admin_analytics.scraper.loader import load_scrape


def test_load_scrape(db_conn):
    staff = [
        StaffEntry(name="John Doe", title="Financial Specialist", email="jdoe@udel.edu", unit="COE Central"),
        StaffEntry(name="Jane Smith", title="Research Associate", email="jsmith@udel.edu", unit="CBE"),
        StaffEntry(name="Bob Jones", title="Academic Advisor", email="bjones@udel.edu", unit="ME"),
    ]

    inserted = load_scrape(db_conn, staff, scrape_date=date(2026, 3, 30))
    assert inserted == 3

    rows = db_conn.execute(
        "SELECT unit, person_name, category, is_overhead FROM raw_admin_headcount ORDER BY person_name"
    ).fetchall()
    assert len(rows) == 3

    # Ordered by person_name; each entry's title determines category/overhead:
    # Academic Advisor -> ACADEMIC_SUPPORT (not overhead),
    # Research Associate -> RESEARCH (not overhead),
    # Financial Specialist -> FINANCE (overhead).
    assert rows[0] == ("ME", "Bob Jones", "ACADEMIC_SUPPORT", False)
    assert rows[1] == ("CBE", "Jane Smith", "RESEARCH", False)
    assert rows[2] == ("COE Central", "John Doe", "FINANCE", True)


def test_load_scrape_idempotent(db_conn):
    staff = [
        StaffEntry(name="John Doe", title="Financial Specialist", email="jdoe@udel.edu", unit="COE Central"),
    ]
    snapshot_day = date(2026, 3, 30)
    load_scrape(db_conn, staff, scrape_date=snapshot_day)
    load_scrape(db_conn, staff, scrape_date=snapshot_day)  # second run should replace

    total = db_conn.execute(
        "SELECT COUNT(*) FROM raw_admin_headcount WHERE scrape_date = ?", [snapshot_day]
    ).fetchone()[0]
    assert total == 1


# --- tests/test_validation.py ---
"""Tests for data validation module."""

from admin_analytics.validation import (
    validate_row_counts,
    validate_null_rates,
    validate_year_coverage,
    validate_cross_source_consistency,
    format_report,
)


class TestRowCounts:
    def test_empty_tables(self, db_conn):
        counts = validate_row_counts(db_conn)
        assert counts["raw_cpi_u"] == 0
        assert counts["raw_institution"] == 0

    def test_with_data(self, db_conn):
        db_conn.execute(
            "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (2023, 1, 299.17, 'CUUR0000SA0')"
        )
        db_conn.execute(
            "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (2023, 2, 300.84, 'CUUR0000SA0')"
        )
        counts = validate_row_counts(db_conn)
        assert counts["raw_cpi_u"] == 2


class TestNullRates:
    def test_empty_tables_excluded(self, db_conn):
        rates = validate_null_rates(db_conn)
        assert "raw_cpi_u" not in rates

    def test_no_nulls(self, db_conn):
        db_conn.execute(
            "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (2023, 1, 299.17, 'CUUR0000SA0')"
        )
        rates = validate_null_rates(db_conn)
        assert rates["raw_cpi_u"]["year"] == 0.0
        assert rates["raw_cpi_u"]["value"] == 0.0

    def test_with_nulls(self, db_conn):
        # One of two rows leaves total_revenue NULL -> 50% NULL rate.
        db_conn.execute(
            "INSERT INTO raw_990_filing (object_id, ein, tax_year) VALUES ('f1', '123', 2023)"
        )
        db_conn.execute(
            "INSERT INTO raw_990_filing (object_id, ein, tax_year, total_revenue) VALUES ('f2', '123', 2023, 100)"
        )
        rates = validate_null_rates(db_conn)
        assert rates["raw_990_filing"]["total_revenue"] == 50.0
        assert rates["raw_990_filing"]["ein"] == 0.0


class TestYearCoverage:
    def test_empty_tables(self, db_conn):
        coverage = validate_year_coverage(db_conn)
        assert coverage["raw_cpi_u"]["years"] == []

    def test_with_data(self, db_conn):
        for seeded_year in [2020, 2021, 2023]:
            db_conn.execute(
                "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (?, 1, 100.0, 'CUUR0000SA0')",
                [seeded_year],
            )
        coverage = validate_year_coverage(db_conn)
        assert coverage["raw_cpi_u"]["years"] == [2020, 2021, 2023]
        assert 2022 in coverage["raw_cpi_u"]["gaps"]


class TestCrossSource:
    def test_empty(self, db_conn):
        outcome = validate_cross_source_consistency(db_conn)
        assert outcome["years_in_all_sources"] == []

    def test_overlap(self, db_conn):
        # 2023 appears in all three sources; 2022 only in IPEDS finance.
        db_conn.execute(
            "INSERT INTO raw_ipeds_finance (unitid, year) VALUES (130943, 2022)"
        )
        db_conn.execute(
            "INSERT INTO raw_ipeds_finance (unitid, year) VALUES (130943, 2023)"
        )
        db_conn.execute(
            "INSERT INTO raw_990_filing (object_id, tax_year) VALUES ('f1', 2023)"
        )
        db_conn.execute(
            "INSERT INTO raw_cpi_u (year, month, value, series_id) VALUES (2023, 1, 299.0, 'X')"
        )
        outcome = validate_cross_source_consistency(db_conn)
        assert 2023 in outcome["years_in_all_sources"]
        assert 2022 not in outcome["years_in_all_sources"]


class TestFormatReport:
    def test_runs_on_empty_db(self, db_conn):
        report = format_report(db_conn)
        # Every report section header must be present even with no data.
        assert "Row Counts" in report
        assert "NULL Rates" in report
        assert "Year Coverage" in report
        assert "Cross-Source" in report
"https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, +] + +[[package]] +name = "blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, +] + [[package]] name = "certifi" version = "2026.2.25" @@ -72,6 +102,95 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] +[[package]] +name = "charset-normalizer" +version = "3.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/60/e3bec1881450851b087e301bedc3daa9377a4d45f1c26aa90b0b235e38aa/charset_normalizer-3.4.6.tar.gz", hash = 
"sha256:1ae6b62897110aa7c79ea2f5dd38d1abca6db663687c0b1ad9aed6f6bae3d9d6", size = 143363, upload-time = "2026-03-15T18:53:25.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/28/ff6f234e628a2de61c458be2779cb182bc03f6eec12200d4a525bbfc9741/charset_normalizer-3.4.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:82060f995ab5003a2d6e0f4ad29065b7672b6593c8c63559beefe5b443242c3e", size = 293582, upload-time = "2026-03-15T18:50:25.454Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b7/b1a117e5385cbdb3205f6055403c2a2a220c5ea80b8716c324eaf75c5c95/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60c74963d8350241a79cb8feea80e54d518f72c26db618862a8f53e5023deaf9", size = 197240, upload-time = "2026-03-15T18:50:27.196Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/2574f0f09f3c3bc1b2f992e20bce6546cb1f17e111c5be07308dc5427956/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6e4333fb15c83f7d1482a76d45a0818897b3d33f00efd215528ff7c51b8e35d", size = 217363, upload-time = "2026-03-15T18:50:28.601Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d1/0ae20ad77bc949ddd39b51bf383b6ca932f2916074c95cad34ae465ab71f/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bc72863f4d9aba2e8fd9085e63548a324ba706d2ea2c83b260da08a59b9482de", size = 212994, upload-time = "2026-03-15T18:50:30.102Z" }, + { url = "https://files.pythonhosted.org/packages/60/ac/3233d262a310c1b12633536a07cde5ddd16985e6e7e238e9f3f9423d8eb9/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cc4fc6c196d6a8b76629a70ddfcd4635a6898756e2d9cac5565cf0654605d73", size = 204697, upload-time = "2026-03-15T18:50:31.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/3c/8a18fc411f085b82303cfb7154eed5bd49c77035eb7608d049468b53f87c/charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0c173ce3a681f309f31b87125fecec7a5d1347261ea11ebbb856fa6006b23c8c", size = 191673, upload-time = "2026-03-15T18:50:33.433Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a7/11cfe61d6c5c5c7438d6ba40919d0306ed83c9ab957f3d4da2277ff67836/charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c907cdc8109f6c619e6254212e794d6548373cc40e1ec75e6e3823d9135d29cc", size = 201120, upload-time = "2026-03-15T18:50:35.105Z" }, + { url = "https://files.pythonhosted.org/packages/b5/10/cf491fa1abd47c02f69687046b896c950b92b6cd7337a27e6548adbec8e4/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:404a1e552cf5b675a87f0651f8b79f5f1e6fd100ee88dc612f89aa16abd4486f", size = 200911, upload-time = "2026-03-15T18:50:36.819Z" }, + { url = "https://files.pythonhosted.org/packages/28/70/039796160b48b18ed466fde0af84c1b090c4e288fae26cd674ad04a2d703/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e3c701e954abf6fc03a49f7c579cc80c2c6cc52525340ca3186c41d3f33482ef", size = 192516, upload-time = "2026-03-15T18:50:38.228Z" }, + { url = "https://files.pythonhosted.org/packages/ff/34/c56f3223393d6ff3124b9e78f7de738047c2d6bc40a4f16ac0c9d7a1cb3c/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a6967aaf043bceabab5412ed6bd6bd26603dae84d5cb75bf8d9a74a4959d398", size = 218795, upload-time = "2026-03-15T18:50:39.664Z" }, + { url = "https://files.pythonhosted.org/packages/e8/3b/ce2d4f86c5282191a041fdc5a4ce18f1c6bd40a5bd1f74cf8625f08d51c1/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5feb91325bbceade6afab43eb3b508c63ee53579fe896c77137ded51c6b6958e", size = 201833, upload-time = "2026-03-15T18:50:41.552Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/9b/b6a9f76b0fd7c5b5ec58b228ff7e85095370282150f0bd50b3126f5506d6/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f820f24b09e3e779fe84c3c456cb4108a7aa639b0d1f02c28046e11bfcd088ed", size = 213920, upload-time = "2026-03-15T18:50:43.33Z" }, + { url = "https://files.pythonhosted.org/packages/ae/98/7bc23513a33d8172365ed30ee3a3b3fe1ece14a395e5fc94129541fc6003/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b35b200d6a71b9839a46b9b7fff66b6638bb52fc9658aa58796b0326595d3021", size = 206951, upload-time = "2026-03-15T18:50:44.789Z" }, + { url = "https://files.pythonhosted.org/packages/32/73/c0b86f3d1458468e11aec870e6b3feac931facbe105a894b552b0e518e79/charset_normalizer-3.4.6-cp311-cp311-win32.whl", hash = "sha256:9ca4c0b502ab399ef89248a2c84c54954f77a070f28e546a85e91da627d1301e", size = 143703, upload-time = "2026-03-15T18:50:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e3/76f2facfe8eddee0bbd38d2594e709033338eae44ebf1738bcefe0a06185/charset_normalizer-3.4.6-cp311-cp311-win_amd64.whl", hash = "sha256:a9e68c9d88823b274cf1e72f28cb5dc89c990edf430b0bfd3e2fb0785bfeabf4", size = 153857, upload-time = "2026-03-15T18:50:47.563Z" }, + { url = "https://files.pythonhosted.org/packages/e2/dc/9abe19c9b27e6cd3636036b9d1b387b78c40dedbf0b47f9366737684b4b0/charset_normalizer-3.4.6-cp311-cp311-win_arm64.whl", hash = "sha256:97d0235baafca5f2b09cf332cc275f021e694e8362c6bb9c96fc9a0eb74fc316", size = 142751, upload-time = "2026-03-15T18:50:49.234Z" }, + { url = "https://files.pythonhosted.org/packages/e5/62/c0815c992c9545347aeea7859b50dc9044d147e2e7278329c6e02ac9a616/charset_normalizer-3.4.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ef7fedc7a6ecbe99969cd09632516738a97eeb8bd7258bf8a0f23114c057dab", size = 295154, upload-time = "2026-03-15T18:50:50.88Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/37/bdca6613c2e3c58c7421891d80cc3efa1d32e882f7c4a7ee6039c3fc951a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4ea868bc28109052790eb2b52a9ab33f3aa7adc02f96673526ff47419490e21", size = 199191, upload-time = "2026-03-15T18:50:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/6c/92/9934d1bbd69f7f398b38c5dae1cbf9cc672e7c34a4adf7b17c0a9c17d15d/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:836ab36280f21fc1a03c99cd05c6b7af70d2697e374c7af0b61ed271401a72a2", size = 218674, upload-time = "2026-03-15T18:50:54.102Z" }, + { url = "https://files.pythonhosted.org/packages/af/90/25f6ab406659286be929fd89ab0e78e38aa183fc374e03aa3c12d730af8a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f1ce721c8a7dfec21fcbdfe04e8f68174183cf4e8188e0645e92aa23985c57ff", size = 215259, upload-time = "2026-03-15T18:50:55.616Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ef/79a463eb0fff7f96afa04c1d4c51f8fc85426f918db467854bfb6a569ce3/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e28d62a8fc7a1fa411c43bd65e346f3bce9716dc51b897fbe930c5987b402d5", size = 207276, upload-time = "2026-03-15T18:50:57.054Z" }, + { url = "https://files.pythonhosted.org/packages/f7/72/d0426afec4b71dc159fa6b4e68f868cd5a3ecd918fec5813a15d292a7d10/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:530d548084c4a9f7a16ed4a294d459b4f229db50df689bfe92027452452943a0", size = 195161, upload-time = "2026-03-15T18:50:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/c82b06a68bfcb6ce55e508225d210c7e6a4ea122bfc0748892f3dc4e8e11/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:30f445ae60aad5e1f8bdbb3108e39f6fbc09f4ea16c815c66578878325f8f15a", size = 203452, upload-time = "2026-03-15T18:51:00.196Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/0c25979b92f8adafdbb946160348d8d44aa60ce99afdc27df524379875cb/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ac2393c73378fea4e52aa56285a3d64be50f1a12395afef9cce47772f60334c2", size = 202272, upload-time = "2026-03-15T18:51:01.703Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3d/7fea3e8fe84136bebbac715dd1221cc25c173c57a699c030ab9b8900cbb7/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:90ca27cd8da8118b18a52d5f547859cc1f8354a00cd1e8e5120df3e30d6279e5", size = 195622, upload-time = "2026-03-15T18:51:03.526Z" }, + { url = "https://files.pythonhosted.org/packages/57/8a/d6f7fd5cb96c58ef2f681424fbca01264461336d2a7fc875e4446b1f1346/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e5a94886bedca0f9b78fecd6afb6629142fd2605aa70a125d49f4edc6037ee6", size = 220056, upload-time = "2026-03-15T18:51:05.269Z" }, + { url = "https://files.pythonhosted.org/packages/16/50/478cdda782c8c9c3fb5da3cc72dd7f331f031e7f1363a893cdd6ca0f8de0/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:695f5c2823691a25f17bc5d5ffe79fa90972cc34b002ac6c843bb8a1720e950d", size = 203751, upload-time = "2026-03-15T18:51:06.858Z" }, + { url = "https://files.pythonhosted.org/packages/75/fc/cc2fcac943939c8e4d8791abfa139f685e5150cae9f94b60f12520feaa9b/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:231d4da14bcd9301310faf492051bee27df11f2bc7549bc0bb41fef11b82daa2", size = 216563, upload-time = "2026-03-15T18:51:08.564Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b7/a4add1d9a5f68f3d037261aecca83abdb0ab15960a3591d340e829b37298/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:a056d1ad2633548ca18ffa2f85c202cfb48b68615129143915b8dc72a806a923", size = 209265, upload-time = "2026-03-15T18:51:10.312Z" }, + { url = "https://files.pythonhosted.org/packages/6c/18/c094561b5d64a24277707698e54b7f67bd17a4f857bbfbb1072bba07c8bf/charset_normalizer-3.4.6-cp312-cp312-win32.whl", hash = "sha256:c2274ca724536f173122f36c98ce188fd24ce3dad886ec2b7af859518ce008a4", size = 144229, upload-time = "2026-03-15T18:51:11.694Z" }, + { url = "https://files.pythonhosted.org/packages/ab/20/0567efb3a8fd481b8f34f739ebddc098ed062a59fed41a8d193a61939e8f/charset_normalizer-3.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:c8ae56368f8cc97c7e40a7ee18e1cedaf8e780cd8bc5ed5ac8b81f238614facb", size = 154277, upload-time = "2026-03-15T18:51:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/15/57/28d79b44b51933119e21f65479d0864a8d5893e494cf5daab15df0247c17/charset_normalizer-3.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:899d28f422116b08be5118ef350c292b36fc15ec2daeb9ea987c89281c7bb5c4", size = 142817, upload-time = "2026-03-15T18:51:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1d/4fdabeef4e231153b6ed7567602f3b68265ec4e5b76d6024cf647d43d981/charset_normalizer-3.4.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11afb56037cbc4b1555a34dd69151e8e069bee82e613a73bef6e714ce733585f", size = 294823, upload-time = "2026-03-15T18:51:15.755Z" }, + { url = "https://files.pythonhosted.org/packages/47/7b/20e809b89c69d37be748d98e84dce6820bf663cf19cf6b942c951a3e8f41/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423fb7e748a08f854a08a222b983f4df1912b1daedce51a72bd24fe8f26a1843", size = 198527, upload-time = "2026-03-15T18:51:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/4f8d27527d59c039dce6f7622593cdcd3d70a8504d87d09eb11e9fdc6062/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:d73beaac5e90173ac3deb9928a74763a6d230f494e4bfb422c217a0ad8e629bf", size = 218388, upload-time = "2026-03-15T18:51:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9b/4770ccb3e491a9bacf1c46cc8b812214fe367c86a96353ccc6daf87b01ec/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d60377dce4511655582e300dc1e5a5f24ba0cb229005a1d5c8d0cb72bb758ab8", size = 214563, upload-time = "2026-03-15T18:51:20.374Z" }, + { url = "https://files.pythonhosted.org/packages/2b/58/a199d245894b12db0b957d627516c78e055adc3a0d978bc7f65ddaf7c399/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:530e8cebeea0d76bdcf93357aa5e41336f48c3dc709ac52da2bb167c5b8271d9", size = 206587, upload-time = "2026-03-15T18:51:21.807Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/3def227f1ec56f5c69dfc8392b8bd63b11a18ca8178d9211d7cc5e5e4f27/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:a26611d9987b230566f24a0a125f17fe0de6a6aff9f25c9f564aaa2721a5fb88", size = 194724, upload-time = "2026-03-15T18:51:23.508Z" }, + { url = "https://files.pythonhosted.org/packages/58/ab/9318352e220c05efd31c2779a23b50969dc94b985a2efa643ed9077bfca5/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:34315ff4fc374b285ad7f4a0bf7dcbfe769e1b104230d40f49f700d4ab6bbd84", size = 202956, upload-time = "2026-03-15T18:51:25.239Z" }, + { url = "https://files.pythonhosted.org/packages/75/13/f3550a3ac25b70f87ac98c40d3199a8503676c2f1620efbf8d42095cfc40/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ddd609f9e1af8c7bd6e2aca279c931aefecd148a14402d4e368f3171769fd", size = 201923, upload-time = "2026-03-15T18:51:26.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/db/c5c643b912740b45e8eec21de1bbab8e7fc085944d37e1e709d3dcd9d72f/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80d0a5615143c0b3225e5e3ef22c8d5d51f3f72ce0ea6fb84c943546c7b25b6c", size = 195366, upload-time = "2026-03-15T18:51:28.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/67/3b1c62744f9b2448443e0eb160d8b001c849ec3fef591e012eda6484787c/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:92734d4d8d187a354a556626c221cd1a892a4e0802ccb2af432a1d85ec012194", size = 219752, upload-time = "2026-03-15T18:51:29.556Z" }, + { url = "https://files.pythonhosted.org/packages/f6/98/32ffbaf7f0366ffb0445930b87d103f6b406bc2c271563644bde8a2b1093/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:613f19aa6e082cf96e17e3ffd89383343d0d589abda756b7764cf78361fd41dc", size = 203296, upload-time = "2026-03-15T18:51:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/5d308c1bbe60cabb0c5ef511574a647067e2a1f631bc8634fcafaccd8293/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2b1a63e8224e401cafe7739f77efd3f9e7f5f2026bda4aead8e59afab537784f", size = 215956, upload-time = "2026-03-15T18:51:32.399Z" }, + { url = "https://files.pythonhosted.org/packages/53/e9/5f85f6c5e20669dbe56b165c67b0260547dea97dba7e187938833d791687/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6cceb5473417d28edd20c6c984ab6fee6c6267d38d906823ebfe20b03d607dc2", size = 208652, upload-time = "2026-03-15T18:51:34.214Z" }, + { url = "https://files.pythonhosted.org/packages/f1/11/897052ea6af56df3eef3ca94edafee410ca699ca0c7b87960ad19932c55e/charset_normalizer-3.4.6-cp313-cp313-win32.whl", hash = "sha256:d7de2637729c67d67cf87614b566626057e95c303bc0a55ffe391f5205e7003d", size = 143940, upload-time = "2026-03-15T18:51:36.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/5c/724b6b363603e419829f561c854b87ed7c7e31231a7908708ac086cdf3e2/charset_normalizer-3.4.6-cp313-cp313-win_amd64.whl", hash = "sha256:572d7c822caf521f0525ba1bce1a622a0b85cf47ffbdae6c9c19e3b5ac3c4389", size = 154101, upload-time = "2026-03-15T18:51:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/01/a5/7abf15b4c0968e47020f9ca0935fb3274deb87cb288cd187cad92e8cdffd/charset_normalizer-3.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:a4474d924a47185a06411e0064b803c68be044be2d60e50e8bddcc2649957c1f", size = 143109, upload-time = "2026-03-15T18:51:39.565Z" }, + { url = "https://files.pythonhosted.org/packages/25/6f/ffe1e1259f384594063ea1869bfb6be5cdb8bc81020fc36c3636bc8302a1/charset_normalizer-3.4.6-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9cc6e6d9e571d2f863fa77700701dae73ed5f78881efc8b3f9a4398772ff53e8", size = 294458, upload-time = "2026-03-15T18:51:41.134Z" }, + { url = "https://files.pythonhosted.org/packages/56/60/09bb6c13a8c1016c2ed5c6a6488e4ffef506461aa5161662bd7636936fb1/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5960d965e67165d75b7c7ffc60a83ec5abfc5c11b764ec13ea54fbef8b4421", size = 199277, upload-time = "2026-03-15T18:51:42.953Z" }, + { url = "https://files.pythonhosted.org/packages/00/50/dcfbb72a5138bbefdc3332e8d81a23494bf67998b4b100703fd15fa52d81/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b3694e3f87f8ac7ce279d4355645b3c878d24d1424581b46282f24b92f5a4ae2", size = 218758, upload-time = "2026-03-15T18:51:44.339Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/d79a9a191bb75f5aa81f3aaaa387ef29ce7cb7a9e5074ba8ea095cc073c2/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d11595abf8dd942a77883a39d81433739b287b6aa71620f15164f8096221b30", size = 215299, 
upload-time = "2026-03-15T18:51:45.871Z" }, + { url = "https://files.pythonhosted.org/packages/76/7e/bc8911719f7084f72fd545f647601ea3532363927f807d296a8c88a62c0d/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7bda6eebafd42133efdca535b04ccb338ab29467b3f7bf79569883676fc628db", size = 206811, upload-time = "2026-03-15T18:51:47.308Z" }, + { url = "https://files.pythonhosted.org/packages/e2/40/c430b969d41dda0c465aa36cc7c2c068afb67177bef50905ac371b28ccc7/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:bbc8c8650c6e51041ad1be191742b8b421d05bbd3410f43fa2a00c8db87678e8", size = 193706, upload-time = "2026-03-15T18:51:48.849Z" }, + { url = "https://files.pythonhosted.org/packages/48/15/e35e0590af254f7df984de1323640ef375df5761f615b6225ba8deb9799a/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22c6f0c2fbc31e76c3b8a86fba1a56eda6166e238c29cdd3d14befdb4a4e4815", size = 202706, upload-time = "2026-03-15T18:51:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bd/f736f7b9cc5e93a18b794a50346bb16fbfd6b37f99e8f306f7951d27c17c/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7edbed096e4a4798710ed6bc75dcaa2a21b68b6c356553ac4823c3658d53743a", size = 202497, upload-time = "2026-03-15T18:51:52.012Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ba/2cc9e3e7dfdf7760a6ed8da7446d22536f3d0ce114ac63dee2a5a3599e62/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7f9019c9cb613f084481bd6a100b12e1547cf2efe362d873c2e31e4035a6fa43", size = 193511, upload-time = "2026-03-15T18:51:53.723Z" }, + { url = "https://files.pythonhosted.org/packages/9e/cb/5be49b5f776e5613be07298c80e1b02a2d900f7a7de807230595c85a8b2e/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:58c948d0d086229efc484fe2f30c2d382c86720f55cd9bc33591774348ad44e0", size 
= 220133, upload-time = "2026-03-15T18:51:55.333Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/99f1b5dad345accb322c80c7821071554f791a95ee50c1c90041c157ae99/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:419a9d91bd238052642a51938af8ac05da5b3343becde08d5cdeab9046df9ee1", size = 203035, upload-time = "2026-03-15T18:51:56.736Z" }, + { url = "https://files.pythonhosted.org/packages/87/9a/62c2cb6a531483b55dddff1a68b3d891a8b498f3ca555fbcf2978e804d9d/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5273b9f0b5835ff0350c0828faea623c68bfa65b792720c453e22b25cc72930f", size = 216321, upload-time = "2026-03-15T18:51:58.17Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/94a010ff81e3aec7c293eb82c28f930918e517bc144c9906a060844462eb/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0e901eb1049fdb80f5bd11ed5ea1e498ec423102f7a9b9e4645d5b8204ff2815", size = 208973, upload-time = "2026-03-15T18:51:59.998Z" }, + { url = "https://files.pythonhosted.org/packages/2a/57/4ecff6d4ec8585342f0c71bc03efaa99cb7468f7c91a57b105bcd561cea8/charset_normalizer-3.4.6-cp314-cp314-win32.whl", hash = "sha256:b4ff1d35e8c5bd078be89349b6f3a845128e685e751b6ea1169cf2160b344c4d", size = 144610, upload-time = "2026-03-15T18:52:02.213Z" }, + { url = "https://files.pythonhosted.org/packages/80/94/8434a02d9d7f168c25767c64671fead8d599744a05d6a6c877144c754246/charset_normalizer-3.4.6-cp314-cp314-win_amd64.whl", hash = "sha256:74119174722c4349af9708993118581686f343adc1c8c9c007d59be90d077f3f", size = 154962, upload-time = "2026-03-15T18:52:03.658Z" }, + { url = "https://files.pythonhosted.org/packages/46/4c/48f2cdbfd923026503dfd67ccea45c94fd8fe988d9056b468579c66ed62b/charset_normalizer-3.4.6-cp314-cp314-win_arm64.whl", hash = "sha256:e5bcc1a1ae744e0bb59641171ae53743760130600da8db48cbb6e4918e186e4e", size = 143595, upload-time = "2026-03-15T18:52:05.123Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/93/8878be7569f87b14f1d52032946131bcb6ebbd8af3e20446bc04053dc3f1/charset_normalizer-3.4.6-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ad8faf8df23f0378c6d527d8b0b15ea4a2e23c89376877c598c4870d1b2c7866", size = 314828, upload-time = "2026-03-15T18:52:06.831Z" }, + { url = "https://files.pythonhosted.org/packages/06/b6/fae511ca98aac69ecc35cde828b0a3d146325dd03d99655ad38fc2cc3293/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5ea69428fa1b49573eef0cc44a1d43bebd45ad0c611eb7d7eac760c7ae771bc", size = 208138, upload-time = "2026-03-15T18:52:08.239Z" }, + { url = "https://files.pythonhosted.org/packages/54/57/64caf6e1bf07274a1e0b7c160a55ee9e8c9ec32c46846ce59b9c333f7008/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:06a7e86163334edfc5d20fe104db92fcd666e5a5df0977cb5680a506fe26cc8e", size = 224679, upload-time = "2026-03-15T18:52:10.043Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cb/9ff5a25b9273ef160861b41f6937f86fae18b0792fe0a8e75e06acb08f1d/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e1f6e2f00a6b8edb562826e4632e26d063ac10307e80f7461f7de3ad8ef3f077", size = 223475, upload-time = "2026-03-15T18:52:11.854Z" }, + { url = "https://files.pythonhosted.org/packages/fc/97/440635fc093b8d7347502a377031f9605a1039c958f3cd18dcacffb37743/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b52c68d64c1878818687a473a10547b3292e82b6f6fe483808fb1468e2f52f", size = 215230, upload-time = "2026-03-15T18:52:13.325Z" }, + { url = "https://files.pythonhosted.org/packages/cd/24/afff630feb571a13f07c8539fbb502d2ab494019492aaffc78ef41f1d1d0/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = 
"sha256:7504e9b7dc05f99a9bbb4525c67a2c155073b44d720470a148b34166a69c054e", size = 199045, upload-time = "2026-03-15T18:52:14.752Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/d1399ecdaf7e0498c327433e7eefdd862b41236a7e484355b8e0e5ebd64b/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:172985e4ff804a7ad08eebec0a1640ece87ba5041d565fff23c8f99c1f389484", size = 211658, upload-time = "2026-03-15T18:52:16.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/38/16baa0affb957b3d880e5ac2144caf3f9d7de7bc4a91842e447fbb5e8b67/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4be9f4830ba8741527693848403e2c457c16e499100963ec711b1c6f2049b7c7", size = 210769, upload-time = "2026-03-15T18:52:17.782Z" }, + { url = "https://files.pythonhosted.org/packages/05/34/c531bc6ac4c21da9ddfddb3107be2287188b3ea4b53b70fc58f2a77ac8d8/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:79090741d842f564b1b2827c0b82d846405b744d31e84f18d7a7b41c20e473ff", size = 201328, upload-time = "2026-03-15T18:52:19.553Z" }, + { url = "https://files.pythonhosted.org/packages/fa/73/a5a1e9ca5f234519c1953608a03fe109c306b97fdfb25f09182babad51a7/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:87725cfb1a4f1f8c2fc9890ae2f42094120f4b44db9360be5d99a4c6b0e03a9e", size = 225302, upload-time = "2026-03-15T18:52:21.043Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f6/cd782923d112d296294dea4bcc7af5a7ae0f86ab79f8fefbda5526b6cfc0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fcce033e4021347d80ed9c66dcf1e7b1546319834b74445f561d2e2221de5659", size = 211127, upload-time = "2026-03-15T18:52:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c5/0b6898950627af7d6103a449b22320372c24c6feda91aa24e201a478d161/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:ca0276464d148c72defa8bb4390cce01b4a0e425f3b50d1435aa6d7a18107602", size = 222840, upload-time = "2026-03-15T18:52:24.113Z" }, + { url = "https://files.pythonhosted.org/packages/7d/25/c4bba773bef442cbdc06111d40daa3de5050a676fa26e85090fc54dd12f0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:197c1a244a274bb016dd8b79204850144ef77fe81c5b797dc389327adb552407", size = 216890, upload-time = "2026-03-15T18:52:25.541Z" }, + { url = "https://files.pythonhosted.org/packages/35/1a/05dacadb0978da72ee287b0143097db12f2e7e8d3ffc4647da07a383b0b7/charset_normalizer-3.4.6-cp314-cp314t-win32.whl", hash = "sha256:2a24157fa36980478dd1770b585c0f30d19e18f4fb0c47c13aa568f871718579", size = 155379, upload-time = "2026-03-15T18:52:27.05Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7a/d269d834cb3a76291651256f3b9a5945e81d0a49ab9f4a498964e83c0416/charset_normalizer-3.4.6-cp314-cp314t-win_amd64.whl", hash = "sha256:cd5e2801c89992ed8c0a3f0293ae83c159a60d9a5d685005383ef4caca77f2c4", size = 169043, upload-time = "2026-03-15T18:52:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/23/06/28b29fba521a37a8932c6a84192175c34d49f84a6d4773fa63d05f9aff22/charset_normalizer-3.4.6-cp314-cp314t-win_arm64.whl", hash = "sha256:47955475ac79cc504ef2704b192364e51d0d473ad452caedd0002605f780101c", size = 148523, upload-time = "2026-03-15T18:52:29.956Z" }, + { url = "https://files.pythonhosted.org/packages/2a/68/687187c7e26cb24ccbd88e5069f5ef00eba804d36dde11d99aad0838ab45/charset_normalizer-3.4.6-py3-none-any.whl", hash = "sha256:947cf925bc916d90adba35a64c82aace04fa39b46b52d4630ece166655905a69", size = 61455, upload-time = "2026-03-15T18:53:23.833Z" }, +] + [[package]] name = "click" version = "8.3.1" @@ -93,6 +212,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "dash" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flask" }, + { name = "importlib-metadata" }, + { name = "nest-asyncio" }, + { name = "plotly" }, + { name = "requests" }, + { name = "retrying" }, + { name = "setuptools" }, + { name = "typing-extensions" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/44/da/a13ae3a6528bd51a6901461dbff4549c6009de203d6249a89b9a09ac5cfb/dash-4.1.0.tar.gz", hash = "sha256:17a92a87b0c1eacc025079a705e44e72cd4c5794629c0a2909942b611faeb595", size = 6927689, upload-time = "2026-03-23T20:39:47.578Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/00/10b1f8b3885fc4add1853e9603af15c593fa0be20d37c158c4d811e868dc/dash-4.1.0-py3-none-any.whl", hash = "sha256:1af9f302bc14061061012cdb129b7e370d3604b12a7f730b252ad8e4966f01f7", size = 7232489, upload-time = "2026-03-23T20:39:40.658Z" }, +] + [[package]] name = "duckdb" version = "1.5.1" @@ -138,6 +277,75 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, ] +[[package]] +name = "flask" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "markupsafe" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/00/35d85dcce6c57fdc871f3867d465d780f302a175ea360f62533f12b27e2b/flask-3.1.3.tar.gz", hash = "sha256:0ef0e52b8a9cd932855379197dd8f94047b359ca0a78695144304cb45f87c9eb", size = 759004, upload-time = 
"2026-02-19T05:00:57.678Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/34f6962f9b9e9c71f6e5ed806e0d0ff03c9d1b0b2340088a0cf4bce09b18/flask-3.1.3-py3-none-any.whl", hash = "sha256:f4bcbefc124291925f1a26446da31a5178f9483862233b23c0c96a20701f670c", size = 103424, upload-time = "2026-02-19T05:00:56.027Z" }, +] + +[[package]] +name = "greenlet" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, + { url = "https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, + { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, + { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, + { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3a/efb2cf697fbccdf75b24e2c18025e7dfa54c4f31fab75c51d0fe79942cef/greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5", size = 230389, upload-time = "2026-02-20T20:17:18.772Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a1/65bbc059a43a7e2143ec4fc1f9e3f673e04f9c7b371a494a101422ac4fd5/greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd", size = 229645, upload-time = "2026-02-20T20:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = 
"2026-02-20T20:17:43.971Z" }, + { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, + { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, + { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, + { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, + { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = 
"2026-02-20T20:21:09.154Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, + { url = "https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, + { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, + { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, + { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, + { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, + { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, + { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, + { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, + { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, + { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, + { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, + { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, + { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -184,6 +392,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/01/15bb152d77b21318514a96f43af312635eb2500c96b55398d020c93d86ea/importlib_metadata-9.0.0.tar.gz", hash = 
"sha256:a4f57ab599e6a2e3016d7595cfd72eb4661a5106e787a95bcc90c7105b831efc", size = 56405, upload-time = "2026-03-20T06:42:56.999Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/3d/2d244233ac4f76e38533cfcb2991c9eb4c7bf688ae0a036d30725b8faafe/importlib_metadata-9.0.0-py3-none-any.whl", hash = "sha256:2d21d1cc5a017bd0559e36150c21c830ab1dc304dedd1b7ea85d20f45ef3edd7", size = 27789, upload-time = "2026-03-20T06:42:55.665Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -193,6 +413,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + [[package]] name = "lxml" version = "6.0.2" @@ -307,6 +548,80 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 
24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { 
url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -316,6 +631,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "narwhals" +version = "2.18.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/96/45218c2fdec4c9f22178f905086e85ef1a6d63862dcc3cd68eb60f1867f5/narwhals-2.18.1.tar.gz", hash = "sha256:652a1fcc9d432bbf114846688884c215f17eb118aa640b7419295d2f910d2a8b", size = 620578, upload-time = "2026-03-24T15:11:25.456Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/c3/06490e98393dcb4d6ce2bf331a39335375c300afaef526897881fbeae6ab/narwhals-2.18.1-py3-none-any.whl", hash = "sha256:a0a8bb80205323851338888ba3a12b4f65d352362c8a94be591244faf36504ad", size = 444952, upload-time = "2026-03-24T15:11:23.801Z" }, +] + +[[package]] +name 
= "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, +] + [[package]] name = "openpyxl" version = "3.1.5" @@ -337,6 +670,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] +[[package]] +name = "playwright" +version = "1.58.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/c9/9c6061d5703267f1baae6a4647bfd1862e386fbfdb97d889f6f6ae9e3f64/playwright-1.58.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:96e3204aac292ee639edbfdef6298b4be2ea0a55a16b7068df91adac077cc606", size = 42251098, upload-time = "2026-01-30T15:09:24.028Z" }, + { url = "https://files.pythonhosted.org/packages/e0/40/59d34a756e02f8c670f0fee987d46f7ee53d05447d43cd114ca015cb168c/playwright-1.58.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:70c763694739d28df71ed578b9c8202bb83e8fe8fb9268c04dd13afe36301f71", size = 41039625, upload-time = "2026-01-30T15:09:27.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/ee/3ce6209c9c74a650aac9028c621f357a34ea5cd4d950700f8e2c4b7fe2c4/playwright-1.58.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:185e0132578733d02802dfddfbbc35f42be23a45ff49ccae5081f25952238117", size = 42251098, upload-time = "2026-01-30T15:09:30.461Z" }, + { url = "https://files.pythonhosted.org/packages/f1/af/009958cbf23fac551a940d34e3206e6c7eed2b8c940d0c3afd1feb0b0589/playwright-1.58.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c95568ba1eda83812598c1dc9be60b4406dffd60b149bc1536180ad108723d6b", size = 46235268, upload-time = "2026-01-30T15:09:33.787Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a6/0e66ad04b6d3440dae73efb39540c5685c5fc95b17c8b29340b62abbd952/playwright-1.58.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f9999948f1ab541d98812de25e3a8c410776aa516d948807140aff797b4bffa", size = 45964214, upload-time = "2026-01-30T15:09:36.751Z" }, + { url = "https://files.pythonhosted.org/packages/0e/4b/236e60ab9f6d62ed0fd32150d61f1f494cefbf02304c0061e78ed80c1c32/playwright-1.58.0-py3-none-win32.whl", hash = "sha256:1e03be090e75a0fabbdaeab65ce17c308c425d879fa48bb1d7986f96bfad0b99", size = 36815998, upload-time = "2026-01-30T15:09:39.627Z" }, + { url = "https://files.pythonhosted.org/packages/41/f8/5ec599c5e59d2f2f336a05b4f318e733077cd5044f24adb6f86900c3e6a7/playwright-1.58.0-py3-none-win_amd64.whl", hash = "sha256:a2bf639d0ce33b3ba38de777e08697b0d8f3dc07ab6802e4ac53fb65e3907af8", size = 36816005, upload-time = "2026-01-30T15:09:42.449Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c4/cc0229fea55c87d6c9c67fe44a21e2cd28d1d558a5478ed4d617e9fb0c93/playwright-1.58.0-py3-none-win_arm64.whl", hash = "sha256:32ffe5c303901a13a0ecab91d1c3f74baf73b84f4bedbb6b935f5bc11cc98e1b", size = 33085919, upload-time = "2026-01-30T15:09:45.71Z" }, +] + +[[package]] +name = "plotly" +version = "6.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"narwhals" }, + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/fb/41efe84970cfddefd4ccf025e2cbfafe780004555f583e93dba3dac2cdef/plotly-6.6.0.tar.gz", hash = "sha256:b897f15f3b02028d69f755f236be890ba950d0a42d7dfc619b44e2d8cea8748c", size = 7027956, upload-time = "2026-03-02T21:10:25.321Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/d2/c6e44dba74f17c6216ce1b56044a9b93a929f1c2d5bdaff892512b260f5e/plotly-6.6.0-py3-none-any.whl", hash = "sha256:8d6daf0f87412e0c0bfe72e809d615217ab57cc715899a1e5145135a7800d1d0", size = 9910315, upload-time = "2026-03-02T21:10:18.131Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -424,6 +789,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/f2/c0e76a0b451ffdf0cf788932e182758eb7558953f4f27f1aff8e2518b653/pyarrow-23.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:527e8d899f14bd15b740cd5a54ad56b7f98044955373a17179d5956ddb93d9ce", size = 28365807, upload-time = "2026-02-16T10:14:03.892Z" }, ] +[[package]] +name = "pyee" +version = "13.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/04/e7c1fe4dc78a6fdbfd6c337b1c3732ff543b8a397683ab38378447baa331/pyee-13.0.1.tar.gz", hash = "sha256:0b931f7c14535667ed4c7e0d531716368715e860b988770fc7eb8578d1f67fc8", size = 31655, upload-time = "2026-02-14T21:12:28.044Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/b4d4827c93ef43c01f599ef31453ccc1c132b353284fc6c87d535c233129/pyee-13.0.1-py3-none-any.whl", hash = "sha256:af2f8fede4171ef667dfded53f96e2ed0d6e6bd7ee3bb46437f77e3b57689228", size = 15659, upload-time = "2026-02-14T21:12:26.263Z" }, +] + [[package]] name = "pygments" version = "2.20.0" @@ -449,6 +826,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", 
hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, ] +[[package]] +name = "requests" +version = "2.33.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, +] + [[package]] name = "respx" version = "0.22.0" @@ -461,6 +853,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, ] +[[package]] +name = "retrying" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/5a/b17e1e257d3e6f2e7758930e1256832c9ddd576f8631781e6a072914befa/retrying-1.4.2.tar.gz", hash = "sha256:d102e75d53d8d30b88562d45361d6c6c934da06fab31bd81c0420acb97a8ba39", size = 11411, upload-time = "2025-08-03T03:35:25.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/f3/6cd296376653270ac1b423bb30bd70942d9916b6978c6f40472d6ac038e7/retrying-1.4.2-py3-none-any.whl", hash = "sha256:bbc004aeb542a74f3569aeddf42a2516efefcdaff90df0eb38fbfbf19f179f59", size = 10859, 
upload-time = "2025-08-03T03:35:23.829Z" }, +] + [[package]] name = "rich" version = "14.3.3" @@ -474,6 +875,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] +[[package]] +name = "setuptools" +version = "82.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/db/cfac1baf10650ab4d1c111714410d2fbb77ac5a616db26775db562c8fab2/setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9", size = 1152316, upload-time = "2026-03-09T12:47:17.221Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/76/f789f7a86709c6b087c5a2f52f911838cad707cc613162401badc665acfe/setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb", size = 1006223, upload-time = "2026-03-09T12:47:15.026Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -483,6 +893,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] +[[package]] +name = "soupsieve" +version = "2.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, +] + [[package]] name = "typer" version = "0.24.1" @@ -507,6 +926,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "werkzeug" +version = "3.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/43/76ded108b296a49f52de6bac5192ca1c4be84e886f9b5c9ba8427d9694fd/werkzeug-3.1.7.tar.gz", hash = "sha256:fb8c01fe6ab13b9b7cdb46892b99b1d66754e1d7ab8e542e865ec13f526b5351", size = 875700, upload-time = "2026-03-24T01:08:07.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b2/0bba9bbb4596d2d2f285a16c2ab04118f6b957d8441566e1abb892e6a6b2/werkzeug-3.1.7-py3-none-any.whl", hash = 
"sha256:4b314d81163a3e1a169b6a0be2a000a0e204e8873c5de6586f453c55688d422f", size = 226295, upload-time = "2026-03-24T01:08:06.133Z" }, +] + [[package]] name = "xlrd" version = "2.0.2" @@ -515,3 +955,12 @@ sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d199 wheels = [ { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555, upload-time = "2025-06-14T08:46:37.766Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]