cmkl/fall-2025/mat-210/assessment-00020/.ipynb_checkpoints/Untitled-checkpoint.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "e8104954-e923-4bfc-b868-d0daafbaa9e6",
"metadata": {},
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'numpy'",
"output_type": "error",
"traceback": [
"\u001b[31m---------------------------------------------------------------------------\u001b[39m",
"\u001b[31mModuleNotFoundError\u001b[39m Traceback (most recent call last)",
"\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[1]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mnumpy\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mnp\u001b[39;00m\n\u001b[32m 2\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mpandas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mpd\u001b[39;00m\n\u001b[32m 3\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mmatplotlib\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mpyplot\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mplt\u001b[39;00m\n",
"\u001b[31mModuleNotFoundError\u001b[39m: No module named 'numpy'"
]
}
],
"source": [
"import numpy as np\n",
"import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"\n",
"\n",
"# --- 1. Parameters and Data Generation ---\n",
"k = 11 # From Pattanaphol\n",
"N = 30\n",
"R = 1 + k/100 # 1.11\n",
"sigma = 0.05\n",
"np.random.seed(42) # for reproducibility\n",
"\n",
"\n",
"# Generate x values\n",
"x = np.linspace(0.1, R, N)\n",
"\n",
"\n",
"# True function: y_true = 5x^4 - 3x^3 + 2x^2 - x + 1\n",
"y_true_func = lambda x: 5*x**4 - 3*x**3 + 2*x**2 - x + 1\n",
"y_true = y_true_func(x)\n",
"noise = np.random.normal(0, sigma, N)\n",
"y_noisy = y_true + noise\n",
"\n",
"\n",
"# --- 2. Helper Functions ---\n",
"def vandermonde_matrix(x, degree):\n",
" \"\"\"Constructs the Vandermonde matrix A = [x^n, ..., x^1, 1]\"\"\"\n",
" return np.vander(x, degree + 1, increasing=False)\n",
"\n",
"\n",
"def estimate_coefficients(A, B, rho=0):\n",
" \"\"\"Calculates coefficients X* for LS (rho=0) or Ridge (rho>0).\"\"\"\n",
" # X* = (A^T A + rho*I)^-1 * A^T B\n",
" AT = A.T\n",
" ATA = AT @ A\n",
" I = np.identity(ATA.shape[0])\n",
" \n",
" # Calculate X* using the generalized solution\n",
" XTX_plus_rhoI_inv = np.linalg.inv(ATA + rho * I)\n",
" X_star = XTX_plus_rhoI_inv @ AT @ B\n",
" return X_star\n",
"\n",
"\n",
"def poly_val(x_plot, X_star):\n",
" \"\"\"Evaluates the polynomial on x_plot using coefficients X_star.\"\"\"\n",
" degree = len(X_star) - 1\n",
" A_plot = np.vander(x_plot, degree + 1, increasing=False)\n",
" return A_plot @ X_star\n",
"\n",
"\n",
"# --- 3. Coefficient Estimation (LS & RR) ---\n",
"B = y_noisy.reshape(-1, 1)\n",
"\n",
"\n",
"# Calculate all models for plotting\n",
"X_star1 = estimate_coefficients(vandermonde_matrix(x, 1), B, rho=0)\n",
"X_star7_ls = estimate_coefficients(vandermonde_matrix(x, 7), B, rho=0)\n",
"X_star7_rr2 = estimate_coefficients(vandermonde_matrix(x, 7), B, rho=0.1)\n",
"\n",
"\n",
"# --- 4. Plotting ---\n",
"x_plot = np.linspace(x.min(), x.max(), 500)\n",
"y_model1 = poly_val(x_plot, X_star1)\n",
"y_model7_ls = poly_val(x_plot, X_star7_ls)\n",
"y_model7_rr2 = poly_val(x_plot, X_star7_rr2)\n",
"y_true_plot = y_true_func(x_plot)\n",
"\n",
"\n",
"plt.figure(figsize=(12, 8))\n",
"\n",
"\n",
"# Raw Data (Scatter Plot)\n",
"plt.scatter(x, y_noisy, label='Original Noisy Raw Data $(x_i, y_i)$', color='k', marker='o', s=30)\n",
"plt.plot(x_plot, y_true_plot, label='True Underlying Polynomial $y_{true}$ (Degree 4)', color='gray', linestyle='--')\n",
"\n",
"\n",
"# Estimated Models (Line Plots)\n",
"plt.plot(x_plot, y_model1, label='1) Model $n=1$ (Least Squares)', color='r', linestyle='-', linewidth=2)\n",
"plt.plot(x_plot, y_model7_ls, label='3) Model $n=7$ (Least Squares, Overfit)', color='g', linestyle='-', linewidth=2)\n",
"plt.plot(x_plot, y_model7_rr2, label=r'4.2) Model $n=7$ (Ridge, $\\rho=0.1$, Stabilized)', color='b', linestyle='-', linewidth=2)\n",
"\n",
"\n",
"\n",
"\n",
"plt.title('Noisy Polynomial Model Estimation: LS vs Ridge Regression')\n",
"plt.xlabel('$x$')\n",
"plt.ylabel('$y$')\n",
"plt.legend()\n",
"plt.grid(True, linestyle=':', alpha=0.6)\n",
"\n",
"\n",
"plt.show()"
]
},
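{
"cell_type": "markdown",
"id": "00000000-0000-4000-8000-000000000002",
"metadata": {},
"source": [
"Optional follow-up (added sketch, using only variables defined in the cell above): ridge regularization shrinks the coefficient vector relative to plain least squares, which is what stabilizes the overfit degree-7 model. The quick check below compares the coefficient norms."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "00000000-0000-4000-8000-000000000003",
"metadata": {},
"outputs": [],
"source": [
"# Compare coefficient magnitudes: ridge (rho > 0) should yield a smaller norm than plain LS.\n",
"print('||X*||_2, n=7 least squares:  ', float(np.linalg.norm(X_star7_ls)))\n",
"print('||X*||_2, n=7 ridge (rho=0.1):', float(np.linalg.norm(X_star7_rr2)))"
]
},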
{
"cell_type": "code",
"execution_count": null,
"id": "94c872cc-d8c3-454b-9d62-42b03378e2d9",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.14.0"
}
},
"nbformat": 4,
"nbformat_minor": 5
}