palpen
11/1/2017 - 6:11 PM

## Generate polynomials with random perturbations over its domain for simulations

Generate polynomials with random perturbations over its domain for simulations

"""
Source: http://scikit-learn.org/stable/auto_examples/linear_model/plot_polynomial_interpolation.html
"""

import numpy as np
import matplotlib.pyplot as plt

from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import make_pipeline

# 1. Simple polynomial with weird shape over [0, 10]
def f(x):
    """Ground-truth target curve: a sinusoid whose amplitude grows
    like x**0.8, used as the function to approximate by polynomial
    interpolation."""
    return np.sin(x + 12) * x ** 0.8

# Evaluation grid over the domain [0, 10] for plotting.
x_plot = np.linspace(0, 10, 100)

# Column-vector (matrix) view of the grid, as estimators expect 2-D input.
X_plot = x_plot.reshape(-1, 1)

# Draw the ground-truth curve and display it.
plt.plot(x_plot, f(x_plot), color='cornflowerblue', linewidth=2,
         label="ground truth")
plt.legend(loc='lower left')
plt.show()

# 2. Create a random polynomial
def make_data(N=30, err=0.8, rseed=None):
    """Sample ``N`` noisy points from the curve y = 10 - 1/(x + 0.1).

    X values are squared uniform draws in [0, 1) (shape ``(N, 1)``);
    Gaussian noise scaled by ``err`` is added to y when ``err > 0``.
    ``rseed`` seeds the RNG for reproducible draws.
    Returns the pair ``(X, y)``.
    """
    state = np.random.RandomState(rseed)
    X = state.rand(N, 1) ** 2
    x1d = X.ravel()
    y = 10 - 1. / (x1d + 0.1)
    if err > 0:
        noise = state.randn(N)
        y = y + err * noise
    return X, y

def PolynomialRegression(degree=2, **kwargs):
    """Build an estimator that expands inputs to polynomial features of
    the given ``degree`` and fits ordinary least squares on them.

    Extra keyword arguments are forwarded to ``LinearRegression``.
    """
    steps = (PolynomialFeatures(degree), LinearRegression(**kwargs))
    return make_pipeline(*steps)

def custom_poly(degree):
    """Return a polynomial function over [-0.1, 1] obtained by fitting
    a polynomial of the given degree to a randomly generated dataset.

    Parameters
    ----------
    degree : int
        Degree of the fitted polynomial.

    Returns
    -------
    callable
        Maps a 2-D input array of shape (n_samples, 1) to predicted
        y values via the fitted model.
    """
    X, y = make_data()
    # BUG FIX: `degree` was ignored — the model was hard-coded to
    # PolynomialRegression(20), so custom_poly(15) silently fit degree 20.
    model = PolynomialRegression(degree).fit(X, y)
    # The bound predict method already is the function we want; no lambda
    # wrapper needed. (Also removed an unused local `xfit` grid.)
    return model.predict

# Draw one random degree-15 polynomial and plot it over [-0.1, 1].
my_poly = custom_poly(15)

# BUG FIX: `xfit` was only a local variable inside custom_poly and is
# not visible at module level, so the plot call below raised NameError.
# Define the evaluation grid (column vector) here before plotting.
xfit = np.linspace(-0.1, 1.0, 1000)[:, None]

# plot
plt.plot(xfit.ravel(), my_poly(xfit), color='gray')
plt.axis([0, 1.0, -10, 20])
plt.show()
