Work in progress

2025-09-29 08:59:47 +02:00
parent 8a2a9d1064
commit b93df4af0f
26 changed files with 22986 additions and 0 deletions

.gitignore vendored Normal file
@@ -0,0 +1 @@
/venv/

@@ -0,0 +1,21 @@
import numpy as np
import matplotlib.pyplot as plt
# Real measured data
x_real = np.array([1,2,3,4,5,6,7,8,9,10])
y_real = np.array([6,8,9,11,13,14,15,17,18,20])
# Fit a linear regression
coeffs = np.polyfit(x_real, y_real, 1)  # degree-1 polynomial = a straight line; returns the line's coefficients as [slope, intercept]
print(coeffs)
lin_y = coeffs[0]*x_real + coeffs[1]  # evaluate the fitted line at every x_real point: y = m*x + n
# Plotting
plt.scatter(x_real, y_real, color='blue', label='Measured data points')
plt.plot(x_real, lin_y, color='black', linestyle='--', label='Linear regression')
plt.xlabel('Advertising spend (million HUF)')
plt.ylabel('Tickets sold (thousands)')
plt.title('Measured data and linear regression')
plt.legend()
plt.grid(True)
plt.show()

@@ -0,0 +1,29 @@
import numpy as np
import matplotlib.pyplot as plt
# Coefficients
a, b, c, d = 1, -2, 3, -1
# x values
x = np.linspace(-2, 2, 400)
# Cubic polynomial values
y = a + b*x + c*x**2 + d*x**3
# Sample a few points from the polynomial
x_points = np.linspace(-2, 2, 10)
y_points = a + b*x_points + c*x_points**2 + d*x_points**3
# Fit a linear regression to the points
coeffs = np.polyfit(x_points, y_points, 1)  # degree-1 polynomial = a straight line
lin_y = coeffs[0]*x + coeffs[1]
# Plotting
plt.plot(x, y, label='Cubic polynomial')
plt.scatter(x_points, y_points, color='blue', marker='x', s=80, label='Sampled polynomial points')
plt.plot(x, lin_y, color='black', linestyle='--', label='Linear regression')
plt.xlabel('x')
plt.ylabel('y')
plt.title('Polynomial and linear regression')
plt.legend()
plt.grid(True)
plt.show()

@@ -0,0 +1,52 @@
import numpy as np
import matplotlib.pyplot as plt
# Coefficients
a, b, c, d = 1, -2, 3, -1
# x values
x = np.linspace(-2, 2, 400)
# Polynomial values
y = a + b*x + c*x**2 + d*x**3
# Roots (where f(x) = 0)
gyokok = np.roots([d, c, b, a])
real_gyokok = gyokok[np.isreal(gyokok)].real
# First derivative: extrema
# f'(x) = b + 2c*x + 3d*x**2
extr_gyokok = np.roots([3*d, 2*c, b])
real_extr = extr_gyokok[np.isreal(extr_gyokok)].real
extr_y = a + b*real_extr + c*real_extr**2 + d*real_extr**3
# Second derivative: inflection point
# f''(x) = 2c + 6d*x
iflex_x = -2*c/(6*d)
iflex_y = a + b*iflex_x + c*iflex_x**2 + d*iflex_x**3
# Sample x points (evenly spaced via linspace, not actually random)
x_points = np.linspace(-2, 2, 8)
y_points = a + b*x_points + c*x_points**2 + d*x_points**3
# Fit a linear regression
coeffs = np.polyfit(x_points, y_points, 1)  # degree-1 polynomial = a straight line
lin_y = coeffs[0]*x + coeffs[1]  # line: y = slope*x + intercept
plt.plot(x, y, label='Polynomial')
plt.scatter(real_gyokok, np.zeros_like(real_gyokok), color='red', label='Roots')
plt.scatter(real_extr, extr_y, color='green', label='Extrema')
plt.scatter(iflex_x, iflex_y, color='purple', label='Inflection point')
plt.scatter(0, a, color='orange', label='y-intercept')
plt.scatter(x_points, y_points, color='blue', marker='x', s=80, label='Points to approximate')
plt.plot(x, lin_y, color='black', linestyle='--', label='Linear regression')
plt.legend()
plt.xlabel('x')
plt.ylabel('f(x)')
plt.title('Polynomial, points, and linear regression')
plt.grid(True)
plt.show()

@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
import numpy as np
import math
# Create input and output data
x = np.linspace(-math.pi, math.pi, 2000)  # 2000 points evenly spaced between -pi and pi
y = np.sin(x)
# Randomly initialize weights
a = np.random.randn()
b = np.random.randn()
c = np.random.randn()
d = np.random.randn()
learning_rate = 1e-6
for t in range(2000):
# Forward pass: compute predicted y
# y = a + b x + c x^2 + d x^3
y_pred = a + b * x + c * x ** 2 + d * x ** 3
# Compute and print loss
loss = np.square(y_pred - y).sum()
if t % 100 == 99:
print(t, loss)
# Backprop to compute gradients of a, b, c, d with respect to loss
grad_y_pred = 2.0 * (y_pred - y)
grad_a = grad_y_pred.sum()
grad_b = (grad_y_pred * x).sum()
grad_c = (grad_y_pred * x ** 2).sum()
grad_d = (grad_y_pred * x ** 3).sum()
# Update weights
a -= learning_rate * grad_a
b -= learning_rate * grad_b
c -= learning_rate * grad_c
d -= learning_rate * grad_d
print(f'Result: y = {a} + {b} x + {c} x^2 + {d} x^3')

@@ -0,0 +1,37 @@
import numpy as np
import math
import matplotlib.pyplot as plt
# Generate the data
x = np.linspace(-math.pi, math.pi, 2000)
y = np.sin(x)
# Random initial weights
a = np.random.randn()
b = np.random.randn()
c = np.random.randn()
d = np.random.randn()
learning_rate = 1e-6
for t in range(2000):
y_pred = a + b * x + c * x ** 2 + d * x ** 3
loss = np.square(y_pred - y).sum()
grad_y_pred = 2.0 * (y_pred - y)
grad_a = grad_y_pred.sum()
grad_b = (grad_y_pred * x).sum()
grad_c = (grad_y_pred * x ** 2).sum()
grad_d = (grad_y_pred * x ** 3).sum()
a -= learning_rate * grad_a
b -= learning_rate * grad_b
c -= learning_rate * grad_c
d -= learning_rate * grad_d
# Plotting
plt.plot(x, y, label='Sine function')
plt.plot(x, a + b * x + c * x ** 2 + d * x ** 3, label='Learned polynomial', linestyle='--')
plt.xlabel('x')
plt.ylabel('y')
plt.title('Sine vs. learned polynomial')
plt.legend()
plt.grid(True)
plt.show()

@@ -0,0 +1,140 @@
# Step-by-step explanation of the code snippet
This code implements a **simple polynomial regression** in Python using NumPy, approximating a sine wave with a cubic polynomial. It is not PyTorch yet but machine learning written "by hand"; the logic, however, is the same as what you would use in PyTorch.
## 1. Generating the data
```python
x = np.linspace(-math.pi, math.pi, 2000)
y = np.sin(x)
```
- **x**: 2000 evenly spaced points between -π and π.
- **y**: the sine value at each x. This is the "true" function we want to approximate.
## 2. Random initialization of the weights (coefficients)
```python
a = np.random.randn()
b = np.random.randn()
c = np.random.randn()
d = np.random.randn()
```
- These are the coefficients of the polynomial: $y = a + b x + c x^2 + d x^3$
- They start out as random values; during training, these are what "learn".
## 3. Setting the learning rate
```python
learning_rate = 1e-6
```
- This determines how much the weights change at each step.
## 4. Training loop (iterations)
```python
for t in range(2000):
    # ...
```
- The learning steps are repeated 2000 times.
### 4.1. Forward pass
```python
y_pred = a + b * x + c * x ** 2 + d * x ** 3
```
- Evaluates the polynomial at every x using the current weights.
- This is the "predicted" function that training keeps improving.
### 4.2. Computing the loss
```python
loss = np.square(y_pred - y).sum()
```
- Measures how far the prediction deviates from the true sine values.
- It computes the sum of squared differences (the logic of **Mean Squared Error**, just without the mean).
- Every 100th step, the current loss is printed.
### 4.3. Backpropagation: computing the gradients
```python
grad_y_pred = 2.0 * (y_pred - y)
grad_a = grad_y_pred.sum()
grad_b = (grad_y_pred * x).sum()
grad_c = (grad_y_pred * x ** 2).sum()
grad_d = (grad_y_pred * x ** 3).sum()
```
- Computes how the loss changes when each coefficient is nudged slightly (this is the **gradient**).
- It tells you in which direction to "push" each coefficient so that the loss decreases.
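Where these formulas come from: applying the chain rule to the loss,

$$
L = \sum_i (y_{\text{pred},i} - y_i)^2
\quad\Rightarrow\quad
\frac{\partial L}{\partial a} = \sum_i 2\,(y_{\text{pred},i} - y_i),
\qquad
\frac{\partial L}{\partial b} = \sum_i 2\,(y_{\text{pred},i} - y_i)\,x_i,
$$

since $\partial y_{\text{pred}}/\partial a = 1$ and $\partial y_{\text{pred}}/\partial b = x$; the $x^2$ and $x^3$ factors for $c$ and $d$ follow the same pattern.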
### 4.4. Updating the weights (gradient descent)
```python
a -= learning_rate * grad_a
b -= learning_rate * grad_b
c -= learning_rate * grad_c
d -= learning_rate * grad_d
```
- Each coefficient is adjusted against its gradient, scaled by the learning rate.
- This way the polynomial approximates the sine curve better and better.
## 5. Printing the result
```python
print(f'Result: y = {a} + {b} x + {c} x^2 + {d} x^3')
```
- At the end of training, the learned polynomial coefficients are printed.
- With these values, the polynomial approximates the sine wave as closely as it can over the given x range.
***
## What should you see as a result?
- The loss **keeps decreasing** during training; this means the polynomial approximates the sine wave better and better.
- At the end you get a polynomial whose coefficients (a, b, c, d) have "learned" the shape of the sine wave.
- If you plot the $y$ (sine) and $y_{pred}$ (polynomial) curves, the two will look very similar by the end of training.
***
## Steps in summary
1. **Generate the data**: x and y (sine values)
2. **Initialize the weights**: a, b, c, d
3. **Training loop**:
    - Forward pass: evaluate the polynomial
    - Loss: measure the error
    - Gradient: how to change the weights
    - Weight update: learning
4. **Result**: the learned polynomial coefficients
***
If you'd like, I can also show how to automate this in PyTorch, or how to plot the training result on a chart!

@@ -0,0 +1,44 @@
import torch
import math
# Set up PyTorch data type and device (CPU or GPU)
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment to use GPU if available
# 1. Generate input data (x) in [-π, π] and corresponding target output (y = sin(x))
x = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)
y = torch.sin(x) # This is what we want to approximate!
# 2. Randomly initialize the weights (polynomial coefficients)
a = torch.randn((), device=device, dtype=dtype)
b = torch.randn((), device=device, dtype=dtype)
c = torch.randn((), device=device, dtype=dtype)
d = torch.randn((), device=device, dtype=dtype)
learning_rate = 1e-6
for t in range(2000):
# 3. Forward pass: compute predicted y using the current coefficients
# y_pred = a + b*x + c*x^2 + d*x^3 (a cubic polynomial)
y_pred = a + b * x + c * x ** 2 + d * x ** 3
# 4. Compute loss: sum of squared differences between prediction and true values
# (This is called the "Mean Squared Error" loss, except without the mean)
loss = (y_pred - y).pow(2).sum().item()
if t % 100 == 99:
print(t, loss)
# 5. Manually compute gradients for each weight
grad_y_pred = 2.0 * (y_pred - y) # Derivative of loss w.r.t. y_pred
grad_a = grad_y_pred.sum() # Derivative for a
grad_b = (grad_y_pred * x).sum() # Derivative for b
grad_c = (grad_y_pred * x ** 2).sum() # Derivative for c
grad_d = (grad_y_pred * x ** 3).sum() # Derivative for d
# 6. Update each weight by taking a small step in the opposite direction of the gradient
a -= learning_rate * grad_a
b -= learning_rate * grad_b
c -= learning_rate * grad_c
d -= learning_rate * grad_d
print(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')

@@ -0,0 +1,61 @@
import torch
import math
# We want to be able to train our model on an `accelerator <https://pytorch.org/docs/stable/torch.html#accelerators>`__
# such as CUDA, MPS, MTIA, or XPU. If the current accelerator is available, we will use it. Otherwise, we use the CPU.
dtype = torch.float
device = torch.accelerator.current_accelerator().type if torch.accelerator.is_available() else "cpu"
print(f"Using {device} device")
torch.set_default_device(device)
# Create Tensors to hold input and outputs.
# By default, requires_grad=False, which indicates that we do not need to
# compute gradients with respect to these Tensors during the backward pass.
x = torch.linspace(-math.pi, math.pi, 2000, dtype=dtype)
y = torch.sin(x)
# Create random Tensors for weights. For a third order polynomial, we need
# 4 weights: y = a + b x + c x^2 + d x^3
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
a = torch.randn((), dtype=dtype, requires_grad=True)
b = torch.randn((), dtype=dtype, requires_grad=True)
c = torch.randn((), dtype=dtype, requires_grad=True)
d = torch.randn((), dtype=dtype, requires_grad=True)
print(f"a = {a.item()}, b = {b.item()}, c = {c.item()}, d = {d.item()}")
learning_rate = 1e-6
for t in range(2000):
# Forward pass: compute predicted y using operations on Tensors.
y_pred = a + b * x + c * x ** 2 + d * x ** 3
# Compute and print loss using operations on Tensors.
# Now loss is a zero-dimensional (scalar) Tensor
# loss.item() gets the scalar value held in the loss.
loss = (y_pred - y).pow(2).sum()
if t % 100 == 99:
print(t, loss.item())
# Use autograd to compute the backward pass. This call will compute the
# gradient of loss with respect to all Tensors with requires_grad=True.
# After this call a.grad, b.grad, c.grad and d.grad will be Tensors holding
# the gradient of the loss with respect to a, b, c, d respectively.
loss.backward()
# Manually update weights using gradient descent. Wrap in torch.no_grad()
# because weights have requires_grad=True, but we don't need to track this
# in autograd.
with torch.no_grad():
a -= learning_rate * a.grad
b -= learning_rate * b.grad
c -= learning_rate * c.grad
d -= learning_rate * d.grad
# Manually zero the gradients after updating weights
a.grad = None
b.grad = None
c.grad = None
d.grad = None
print(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')

@@ -0,0 +1,87 @@
import torch
import math
class LegendrePolynomial3(torch.autograd.Function):
"""
We can implement our own custom autograd Functions by subclassing
torch.autograd.Function and implementing the forward and backward passes
which operate on Tensors.
"""
@staticmethod
def forward(ctx, input):
"""
In the forward pass we receive a Tensor containing the input and return
a Tensor containing the output. ctx is a context object that can be used
to stash information for backward computation. You can cache tensors for
use in the backward pass using the ``ctx.save_for_backward`` method. Other
objects can be stored directly as attributes on the ctx object, such as
``ctx.my_object = my_object``. Check out `Extending torch.autograd <https://docs.pytorch.org/docs/stable/notes/extending.html#extending-torch-autograd>`_
for further details.
"""
ctx.save_for_backward(input)
return 0.5 * (5 * input ** 3 - 3 * input)
@staticmethod
def backward(ctx, grad_output):
"""
In the backward pass we receive a Tensor containing the gradient of the loss
with respect to the output, and we need to compute the gradient of the loss
with respect to the input.
"""
input, = ctx.saved_tensors
return grad_output * 1.5 * (5 * input ** 2 - 1)
dtype = torch.float
device = torch.device("cpu")
# device = torch.device("cuda:0") # Uncomment this to run on GPU
# Create Tensors to hold input and outputs.
# By default, requires_grad=False, which indicates that we do not need to
# compute gradients with respect to these Tensors during the backward pass.
x = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)
y = torch.sin(x)
# Create random Tensors for weights. For this example, we need
# 4 weights: y = a + b * P3(c + d * x), these weights need to be initialized
# not too far from the correct result to ensure convergence.
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
a = torch.full((), 0.0, device=device, dtype=dtype, requires_grad=True)
b = torch.full((), -1.0, device=device, dtype=dtype, requires_grad=True)
c = torch.full((), 0.0, device=device, dtype=dtype, requires_grad=True)
d = torch.full((), 0.3, device=device, dtype=dtype, requires_grad=True)
learning_rate = 5e-6
for t in range(2000):
# To apply our Function, we use Function.apply method. We alias this as 'P3'.
P3 = LegendrePolynomial3.apply
# Forward pass: compute predicted y using operations; we compute
# P3 using our custom autograd operation.
y_pred = a + b * P3(c + d * x)
# Compute and print loss
loss = (y_pred - y).pow(2).sum()
if t % 100 == 99:
print(t, loss.item())
# Use autograd to compute the backward pass.
loss.backward()
# Update weights using gradient descent
with torch.no_grad():
a -= learning_rate * a.grad
b -= learning_rate * b.grad
c -= learning_rate * c.grad
d -= learning_rate * d.grad
# Manually zero the gradients after updating weights
a.grad = None
b.grad = None
c.grad = None
d.grad = None
print(f'Result: y = {a.item()} + {b.item()} * P3({c.item()} + {d.item()} x)')

@@ -0,0 +1,94 @@
# Custom autograd function in PyTorch: Step-by-step explanation
Let's break down what happens in your code, especially how you get the gradient numbers, what they mean, and how PyTorch's autograd system works when you define your own function.
***
## 1. What is a custom autograd function?
- In PyTorch, you can create your own mathematical operation and tell PyTorch **how to compute its gradient** (how it changes with respect to its input).
- You do this by subclassing `torch.autograd.Function` and implementing two methods:
- `forward`: computes the output from the input (normal math)
- `backward`: given the gradient of the loss with respect to the output, computes the gradient of the loss with respect to the input (the chain-rule step)
***
## 2. How do you get the gradient numbers?
- **Forward pass:** You calculate the output for your function. Here, it's the Legendre polynomial:
$$
P_3(x) = \frac{1}{2}(5x^3 - 3x)
$$
- **Backward pass:** You tell PyTorch the formula for the derivative of your function with respect to its input. For Legendre polynomial:
$$
\frac{dP_3}{dx} = \frac{1}{2}(15x^2 - 3) = 1.5(5x^2 - 1)
$$
- When you call `loss.backward()`, PyTorch uses your `backward` method to calculate how much the loss would change if you changed the input a little bit. This is the **gradient**.
- PyTorch automatically chains these gradients through all operations in your model, so you get the gradient of the loss with respect to every parameter (a, b, c, d).
***
## 3. What do these numbers mean?
- The gradient for each parameter (e.g., `a.grad`, `b.grad`, etc.) tells you **how much the loss would change if you nudged that parameter up or down**.
- If the gradient is large and positive, increasing the parameter will increase the loss (bad), so you want to decrease it.
- If the gradient is large and negative, increasing the parameter will decrease the loss (good), so you want to increase it.
- You use these numbers to update your parameters in the direction that makes the loss smaller (gradient descent).
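Written as an update rule, with learning rate $\eta$, every parameter takes a step against its own gradient:

$$
\theta \leftarrow \theta - \eta\,\frac{\partial L}{\partial \theta},
\qquad \theta \in \{a, b, c, d\}
$$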
***
## 4. How does PyTorch get these numbers?
- During the forward pass, PyTorch builds a **computation graph** of all operations.
- When you call `loss.backward()`, PyTorch walks backward through this graph, using the `backward` methods you defined (and built-in ones for standard operations), applying the chain rule to compute all gradients.
- The numbers you see in `.grad` are the result of this process: they are the derivatives of the loss with respect to each parameter.
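As a quick sanity check (not part of the original code), `torch.autograd.gradcheck` can compare a custom `backward` against numerical differentiation. A minimal sketch, reusing the `LegendrePolynomial3` class defined above:

```python
import torch
from torch.autograd import gradcheck

# gradcheck compares the analytical backward with finite differences;
# it expects double-precision inputs with requires_grad=True.
x_test = torch.randn(20, dtype=torch.double, requires_grad=True)
print(gradcheck(LegendrePolynomial3.apply, (x_test,)))  # True if backward matches
```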
***
## 5. Step-by-step in your code
1. **Forward pass:**
- You compute `y_pred = a + b * P3(c + d * x)` using your custom Legendre polynomial function.
2. **Loss calculation:**
- You compute the loss as the sum of squared errors between `y_pred` and the true `y`.
3. **Backward pass:**
- You call `loss.backward()`. PyTorch uses your custom `backward` method to compute the gradients for all parameters.
4. **Update parameters:**
- You use the gradients to update `a`, `b`, `c`, and `d` so the loss gets smaller.
***
## 6. Summary
- **Custom autograd functions** let you define both the math and the gradient for new operations.
- **Gradients** are numbers that tell you how to change each parameter to make the loss smaller.
- **PyTorch computes these automatically** using your `backward` method and the chain rule.
- You use these gradients to update your parameters and train your model.
If you want, I can walk through a concrete example with numbers, or show how the gradients change during training!

@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-
import torch
import math
# Create Tensors to hold input and outputs.
x = torch.linspace(-math.pi, math.pi, 2000)
y = torch.sin(x)
# For this example, the output y is a linear function of (x, x^2, x^3), so
# we can consider it as a linear layer neural network. Let's prepare the
# tensor (x, x^2, x^3).
p = torch.tensor([1, 2, 3])
xx = x.unsqueeze(-1).pow(p)
# In the above code, x.unsqueeze(-1) has shape (2000, 1), and p has shape
# (3,), for this case, broadcasting semantics will apply to obtain a tensor
# of shape (2000, 3)
# Use the nn package to define our model as a sequence of layers. nn.Sequential
# is a Module which contains other Modules, and applies them in sequence to
# produce its output. The Linear Module computes output from input using a
# linear function, and holds internal Tensors for its weight and bias.
# The Flatten layer flattens the output of the linear layer to a 1D tensor,
# to match the shape of `y`.
model = torch.nn.Sequential(
torch.nn.Linear(3, 1),
torch.nn.Flatten(0, 1)
)
# The nn package also contains definitions of popular loss functions; in this
# case we will use Mean Squared Error (MSE) as our loss function.
loss_fn = torch.nn.MSELoss(reduction='sum')
learning_rate = 1e-6
for t in range(2000):
# Forward pass: compute predicted y by passing x to the model. Module objects
# override the __call__ operator so you can call them like functions. When
# doing so you pass a Tensor of input data to the Module and it produces
# a Tensor of output data.
y_pred = model(xx)
# Compute and print loss. We pass Tensors containing the predicted and true
# values of y, and the loss function returns a Tensor containing the
# loss.
loss = loss_fn(y_pred, y)
if t % 100 == 99:
print(t, loss.item())
# Zero the gradients before running the backward pass.
model.zero_grad()
# Backward pass: compute gradient of the loss with respect to all the learnable
# parameters of the model. Internally, the parameters of each Module are stored
# in Tensors with requires_grad=True, so this call will compute gradients for
# all learnable parameters in the model.
loss.backward()
# Update the weights using gradient descent. Each parameter is a Tensor, so
# we can access its gradients like we did before.
with torch.no_grad():
for param in model.parameters():
param -= learning_rate * param.grad
# You can access the first layer of `model` like accessing the first item of a list
linear_layer = model[0]
# For linear layer, its parameters are stored as `weight` and `bias`.
print(f'Result: y = {linear_layer.bias.item()} + {linear_layer.weight[:, 0].item()} x + {linear_layer.weight[:, 1].item()} x^2 + {linear_layer.weight[:, 2].item()} x^3')

@@ -0,0 +1,128 @@
# What does this code do? Step-by-step explanation
You're absolutely right in your intuition! This time, you're using a **neural network** to learn the relationship between $x$ and $\sin(x)$ _without giving it the explicit formula_. Instead, you give the network lots of example data (inputs and outputs), and let it figure out the best way to predict $y$ from $x$.
Let's walk through every step of the code and what happens:
***
## 1. **Create input and output data**
```python
x = torch.linspace(-math.pi, math.pi, 2000)
y = torch.sin(x)
```
- **x**: 2000 evenly spaced points from $-\pi$ to $\pi$.
- **y**: The true output, $\sin(x)$, for each input $x$.
- This is your "training data"—pairs of (input, output) for the network to learn from.
***
## 2. **Prepare features for the model**
```python
p = torch.tensor([1, 2, 3])
xx = x.unsqueeze(-1).pow(p)
```
- You create a new tensor $xx$ where each row is $[x, x^2, x^3]$.
- This means the network will get three features for each input: $x$, $x^2$, and $x^3$.
- This helps the network learn more complex (curvy) relationships than just a straight line.
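A quick shape check (illustrative toy values, not from the original script) shows what the broadcasting produces:

```python
import torch

# x.unsqueeze(-1) has shape (N, 1); p has shape (3,); broadcasting pow
# yields shape (N, 3), where each row is [x, x^2, x^3].
x = torch.tensor([2.0, 3.0])
p = torch.tensor([1, 2, 3])
xx = x.unsqueeze(-1).pow(p)
print(xx.shape)  # torch.Size([2, 3])
print(xx)        # tensor([[ 2.,  4.,  8.], [ 3.,  9., 27.]])
```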
***
## 3. **Define the neural network model**
```python
model = torch.nn.Sequential(
torch.nn.Linear(3, 1),
torch.nn.Flatten(0, 1)
)
```
- **torch.nn.Linear(3, 1)**: A single layer that takes 3 inputs ($x, x^2, x^3$) and outputs 1 value (prediction for $y$).
- **torch.nn.Flatten(0, 1)**: Flattens the output to match the shape of $y$.
- This is a very simple neural network (just one layer), but it's enough for this regression task.
***
## 4. **Define the loss function**
```python
loss_fn = torch.nn.MSELoss(reduction='sum')
```
- **Mean Squared Error (MSE)**: Measures how far off the predictions are from the true values.
- The goal is to make this loss as small as possible during training.
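With `reduction='sum'`, the value computed is $\text{loss} = \sum_i (\hat{y}_i - y_i)^2$: the squared errors are summed rather than averaged, despite the MSE name.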
***
## 5. **Training loop**
```python
for t in range(2000):
y_pred = model(xx)
loss = loss_fn(y_pred, y)
if t % 100 == 99:
print(t, loss.item())
model.zero_grad()
loss.backward()
with torch.no_grad():
for param in model.parameters():
param -= learning_rate * param.grad
```
- **Forward pass**: The model predicts $y$ for each input $xx$.
- **Compute loss**: How far off are the predictions from the true $y$?
- **Zero gradients**: Clear old gradients before computing new ones.
- **Backward pass**: Compute gradients (how to change weights to reduce loss).
- **Update weights**: Adjust the model's weights using gradient descent.
- This loop repeats 2000 times, gradually improving the model's predictions.
***
## 6. **Result: Print the learned polynomial**
```python
linear_layer = model[0]
print(f'Result: y = {linear_layer.bias.item()} + {linear_layer.weight[:, 0].item()} x + {linear_layer.weight[:, 1].item()} x^2 + {linear_layer.weight[:, 2].item()} x^3')
```
- After training, you print the final weights and bias.
- This gives you the best-fit polynomial the network found to approximate $\sin(x)$ using the data.
***
## **Summary: What is happening?**
- You give the network lots of example data ($x$, $\sin(x)$).
- The network tries to find the best weights to predict $y$ from $x$, $x^2$, and $x^3$.
- You don't tell it the formula for $\sin(x)$—it "learns" the best approximation from the data.
- This is the core idea of neural networks: **learn patterns from data, not from explicit formulas**.
If you want to use a more complex neural network (with hidden layers and nonlinear activations), you can! That would let the model learn even more complicated relationships, and is the next step in deep learning.
Let me know if you want to see how to do that, or want to visualize the predictions vs. the true values!
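For instance, a minimal sketch of such a deeper model (hypothetical, not part of the commit; it takes raw $x$ of shape (N, 1) instead of the hand-built $[x, x^2, x^3]$ features):

```python
import math
import torch

# Hypothetical deeper model: a hidden layer plus a nonlinear activation lets
# the network shape the curve itself instead of relying on polynomial features.
x = torch.linspace(-math.pi, math.pi, 2000)
model = torch.nn.Sequential(
    torch.nn.Linear(1, 16),  # raw x in, 16 hidden units
    torch.nn.Tanh(),         # nonlinearity
    torch.nn.Linear(16, 1),  # hidden units -> one prediction
    torch.nn.Flatten(0, 1),  # (N, 1) -> (N,) to match the shape of y
)
y_pred = model(x.unsqueeze(-1))  # input shape (2000, 1), output shape (2000,)
```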

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
import torch
import math
# Create Tensors to hold input and outputs.
x = torch.linspace(-math.pi, math.pi, 2000)
y = torch.sin(x)
# Prepare the input tensor (x, x^2, x^3).
p = torch.tensor([1, 2, 3])
xx = x.unsqueeze(-1).pow(p)
# Use the nn package to define our model and loss function.
model = torch.nn.Sequential(
torch.nn.Linear(3, 1),
torch.nn.Flatten(0, 1)
)
loss_fn = torch.nn.MSELoss(reduction='sum')
# Use the optim package to define an Optimizer that will update the weights of
# the model for us. Here we will use RMSprop; the optim package contains many other
# optimization algorithms. The first argument to the RMSprop constructor tells the
# optimizer which Tensors it should update.
learning_rate = 1e-3
optimizer = torch.optim.RMSprop(model.parameters(), lr=learning_rate)
for t in range(2000):
# Forward pass: compute predicted y by passing x to the model.
y_pred = model(xx)
# Compute and print loss.
loss = loss_fn(y_pred, y)
if t % 100 == 99:
print(t, loss.item())
# Before the backward pass, use the optimizer object to zero all of the
# gradients for the variables it will update (which are the learnable
# weights of the model). This is because by default, gradients are
# accumulated in buffers (i.e., not overwritten) whenever .backward()
# is called. Check out the docs of torch.autograd.backward for more details.
optimizer.zero_grad()
# Backward pass: compute gradient of the loss with respect to model
# parameters
loss.backward()
# Calling the step function on an Optimizer makes an update to its
# parameters
optimizer.step()
linear_layer = model[0]
print(f'Result: y = {linear_layer.bias.item()} + {linear_layer.weight[:, 0].item()} x + {linear_layer.weight[:, 1].item()} x^2 + {linear_layer.weight[:, 2].item()} x^3')

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
import torch
import math
class Polynomial3(torch.nn.Module):
def __init__(self):
"""
In the constructor we instantiate four parameters and assign them as
member parameters.
"""
super().__init__()
self.a = torch.nn.Parameter(torch.randn(()))
self.b = torch.nn.Parameter(torch.randn(()))
self.c = torch.nn.Parameter(torch.randn(()))
self.d = torch.nn.Parameter(torch.randn(()))
def forward(self, x):
"""
In the forward function we accept a Tensor of input data and we must return
a Tensor of output data. We can use Modules defined in the constructor as
well as arbitrary operators on Tensors.
"""
return self.a + self.b * x + self.c * x ** 2 + self.d * x ** 3
def string(self):
"""
Just like any class in Python, you can also define custom methods on PyTorch modules
"""
return f'y = {self.a.item()} + {self.b.item()} x + {self.c.item()} x^2 + {self.d.item()} x^3'
# Create Tensors to hold input and outputs.
x = torch.linspace(-math.pi, math.pi, 2000)
y = torch.sin(x)
# Construct our model by instantiating the class defined above
model = Polynomial3()
# Construct our loss function and an Optimizer. The call to model.parameters()
# in the SGD constructor will contain the learnable parameters (defined
# with torch.nn.Parameter) which are members of the model.
criterion = torch.nn.MSELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(), lr=1e-6)
for t in range(2000):
# Forward pass: Compute predicted y by passing x to the model
y_pred = model(x)
# Compute and print loss
loss = criterion(y_pred, y)
if t % 100 == 99:
print(t, loss.item())
# Zero gradients, perform a backward pass, and update the weights.
optimizer.zero_grad()
loss.backward()
optimizer.step()
print(f'Result: {model.string()}')

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
import random
import torch
import math
class DynamicNet(torch.nn.Module):
def __init__(self):
"""
In the constructor we instantiate five parameters and assign them as members.
"""
super().__init__()
self.a = torch.nn.Parameter(torch.randn(()))
self.b = torch.nn.Parameter(torch.randn(()))
self.c = torch.nn.Parameter(torch.randn(()))
self.d = torch.nn.Parameter(torch.randn(()))
self.e = torch.nn.Parameter(torch.randn(()))
def forward(self, x):
"""
For the forward pass of the model, we randomly choose whether to include
the fourth- and fifth-order terms (zero, one, or both) and reuse the e
parameter to compute their contribution.
Since each forward pass builds a dynamic computation graph, we can use normal
Python control-flow operators like loops or conditional statements when
defining the forward pass of the model.
Here we also see that it is perfectly safe to reuse the same parameter many
times when defining a computational graph.
"""
y = self.a + self.b * x + self.c * x ** 2 + self.d * x ** 3
for exp in range(4, random.randint(4, 6)):
y = y + self.e * x ** exp
return y
def string(self):
"""
Just like any class in Python, you can also define custom methods on PyTorch modules
"""
return f'y = {self.a.item()} + {self.b.item()} x + {self.c.item()} x^2 + {self.d.item()} x^3 + {self.e.item()} x^4 ? + {self.e.item()} x^5 ?'
# Create Tensors to hold input and outputs.
x = torch.linspace(-math.pi, math.pi, 2000)
y = torch.sin(x)
# Construct our model by instantiating the class defined above
model = DynamicNet()
# Construct our loss function and an Optimizer. Training this strange model with
# vanilla stochastic gradient descent is tough, so we use momentum
criterion = torch.nn.MSELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(), lr=1e-8, momentum=0.9)
for t in range(30000):
# Forward pass: Compute predicted y by passing x to the model
y_pred = model(x)
# Compute and print loss
loss = criterion(y_pred, y)
if t % 2000 == 1999:
print(t, loss.item())
# Zero gradients, perform a backward pass, and update the weights.
optimizer.zero_grad()
loss.backward()
optimizer.step()
print(f'Result: {model.string()}')

@@ -0,0 +1,201 @@
TV,Radio,Newspaper,Sales
230.1,37.8,69.2,22.1
44.5,39.3,45.1,10.4
17.2,45.9,69.3,12
151.5,41.3,58.5,16.5
180.8,10.8,58.4,17.9
8.7,48.9,75,7.2
57.5,32.8,23.5,11.8
120.2,19.6,11.6,13.2
8.6,2.1,1,4.8
199.8,2.6,21.2,15.6
66.1,5.8,24.2,12.6
214.7,24,4,17.4
23.8,35.1,65.9,9.2
97.5,7.6,7.2,13.7
204.1,32.9,46,19
195.4,47.7,52.9,22.4
67.8,36.6,114,12.5
281.4,39.6,55.8,24.4
69.2,20.5,18.3,11.3
147.3,23.9,19.1,14.6
218.4,27.7,53.4,18
237.4,5.1,23.5,17.5
13.2,15.9,49.6,5.6
228.3,16.9,26.2,20.5
62.3,12.6,18.3,9.7
262.9,3.5,19.5,17
142.9,29.3,12.6,15
240.1,16.7,22.9,20.9
248.8,27.1,22.9,18.9
70.6,16,40.8,10.5
292.9,28.3,43.2,21.4
112.9,17.4,38.6,11.9
97.2,1.5,30,13.2
265.6,20,0.3,17.4
95.7,1.4,7.4,11.9
290.7,4.1,8.5,17.8
266.9,43.8,5,25.4
74.7,49.4,45.7,14.7
43.1,26.7,35.1,10.1
228,37.7,32,21.5
202.5,22.3,31.6,16.6
177,33.4,38.7,17.1
293.6,27.7,1.8,20.7
206.9,8.4,26.4,17.9
25.1,25.7,43.3,8.5
175.1,22.5,31.5,16.1
89.7,9.9,35.7,10.6
239.9,41.5,18.5,23.2
227.2,15.8,49.9,19.8
66.9,11.7,36.8,9.7
199.8,3.1,34.6,16.4
100.4,9.6,3.6,10.7
216.4,41.7,39.6,22.6
182.6,46.2,58.7,21.2
262.7,28.8,15.9,20.2
198.9,49.4,60,23.7
7.3,28.1,41.4,5.5
136.2,19.2,16.6,13.2
210.8,49.6,37.7,23.8
210.7,29.5,9.3,18.4
53.5,2,21.4,8.1
261.3,42.7,54.7,24.2
239.3,15.5,27.3,20.7
102.7,29.6,8.4,14
131.1,42.8,28.9,16
69,9.3,0.9,11.3
31.5,24.6,2.2,11
139.3,14.5,10.2,13.4
237.4,27.5,11,18.9
216.8,43.9,27.2,22.3
199.1,30.6,38.7,18.3
109.8,14.3,31.7,12.4
26.8,33,19.3,8.8
129.4,5.7,31.3,11
213.4,24.6,13.1,17
16.9,43.7,89.4,8.7
27.5,1.6,20.7,6.9
120.5,28.5,14.2,14.2
5.4,29.9,9.4,5.3
116,7.7,23.1,11
76.4,26.7,22.3,11.8
239.8,4.1,36.9,17.3
75.3,20.3,32.5,11.3
68.4,44.5,35.6,13.6
213.5,43,33.8,21.7
193.2,18.4,65.7,20.2
76.3,27.5,16,12
110.7,40.6,63.2,16
88.3,25.5,73.4,12.9
109.8,47.8,51.4,16.7
134.3,4.9,9.3,14
28.6,1.5,33,7.3
217.7,33.5,59,19.4
250.9,36.5,72.3,22.2
107.4,14,10.9,11.5
163.3,31.6,52.9,16.9
197.6,3.5,5.9,16.7
184.9,21,22,20.5
289.7,42.3,51.2,25.4
135.2,41.7,45.9,17.2
222.4,4.3,49.8,16.7
296.4,36.3,100.9,23.8
280.2,10.1,21.4,19.8
187.9,17.2,17.9,19.7
238.2,34.3,5.3,20.7
137.9,46.4,59,15
25,11,29.7,7.2
90.4,0.3,23.2,12
13.1,0.4,25.6,5.3
255.4,26.9,5.5,19.8
225.8,8.2,56.5,18.4
241.7,38,23.2,21.8
175.7,15.4,2.4,17.1
209.6,20.6,10.7,20.9
78.2,46.8,34.5,14.6
75.1,35,52.7,12.6
139.2,14.3,25.6,12.2
76.4,0.8,14.8,9.4
125.7,36.9,79.2,15.9
19.4,16,22.3,6.6
141.3,26.8,46.2,15.5
18.8,21.7,50.4,7
224,2.4,15.6,16.6
123.1,34.6,12.4,15.2
229.5,32.3,74.2,19.7
87.2,11.8,25.9,10.6
7.8,38.9,50.6,6.6
80.2,0,9.2,11.9
220.3,49,3.2,24.7
59.6,12,43.1,9.7
0.7,39.6,8.7,1.6
265.2,2.9,43,17.7
8.4,27.2,2.1,5.7
219.8,33.5,45.1,19.6
36.9,38.6,65.6,10.8
48.3,47,8.5,11.6
25.6,39,9.3,9.5
273.7,28.9,59.7,20.8
43,25.9,20.5,9.6
184.9,43.9,1.7,20.7
73.4,17,12.9,10.9
193.7,35.4,75.6,19.2
220.5,33.2,37.9,20.1
104.6,5.7,34.4,10.4
96.2,14.8,38.9,12.3
140.3,1.9,9,10.3
240.1,7.3,8.7,18.2
243.2,49,44.3,25.4
38,40.3,11.9,10.9
44.7,25.8,20.6,10.1
280.7,13.9,37,16.1
121,8.4,48.7,11.6
197.6,23.3,14.2,16.6
171.3,39.7,37.7,16
187.8,21.1,9.5,20.6
4.1,11.6,5.7,3.2
93.9,43.5,50.5,15.3
149.8,1.3,24.3,10.1
11.7,36.9,45.2,7.3
131.7,18.4,34.6,12.9
172.5,18.1,30.7,16.4
85.7,35.8,49.3,13.3
188.4,18.1,25.6,19.9
163.5,36.8,7.4,18
117.2,14.7,5.4,11.9
234.5,3.4,84.8,16.9
17.9,37.6,21.6,8
206.8,5.2,19.4,17.2
215.4,23.6,57.6,17.1
284.3,10.6,6.4,20
50,11.6,18.4,8.4
164.5,20.9,47.4,17.5
19.6,20.1,17,7.6
168.4,7.1,12.8,16.7
222.4,3.4,13.1,16.5
276.9,48.9,41.8,27
248.4,30.2,20.3,20.2
170.2,7.8,35.2,16.7
276.7,2.3,23.7,16.8
165.6,10,17.6,17.6
156.6,2.6,8.3,15.5
218.5,5.4,27.4,17.2
56.2,5.7,29.7,8.7
287.6,43,71.8,26.2
253.8,21.3,30,17.6
205,45.1,19.6,22.6
139.5,2.1,26.6,10.3
191.1,28.7,18.2,17.3
286,13.9,3.7,20.9
18.7,12.1,23.4,6.7
39.5,41.1,5.8,10.8
75.5,10.8,6,11.9
17.2,4.1,31.6,5.9
166.8,42,3.6,19.6
149.7,35.6,6,17.3
38.2,3.7,13.8,7.6
94.2,4.9,8.1,14
177,9.3,6.4,14.8
283.6,42,66.2,25.5
232.1,8.6,8.7,18.4

@@ -0,0 +1,25 @@
import pandas as pd
import torch
import torch.nn as nn
df = pd.read_csv("./RegressionModels/AdvertisementPrediction/advertising.csv")
X = torch.tensor(df[["TV", "Radio", "Newspaper"]].values, dtype=torch.float32)
Y = torch.tensor(df["Sales"].values, dtype=torch.float32)
model = torch.nn.Sequential(
    torch.nn.Linear(3, 1),
    torch.nn.Flatten(0, 1)  # flatten (N, 1) -> (N,) so predictions match the shape of Y
)
loss_fn = torch.nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=3e-5)
for epoch in range(2000):
y_pred = model(X)
loss = loss_fn(y_pred, Y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if epoch % 100 == 99:
print(f'Epoch {epoch+1}, Loss: {loss.item():.2f}')
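# Sketch (not part of the original commit): read the learned coefficients off
# the linear layer after training; weight has shape (1, 3), bias shape (1,).
linear_layer = model[0]
print(f'Sales ~ {linear_layer.bias.item():.3f}'
      f' + {linear_layer.weight[0, 0].item():.4f}*TV'
      f' + {linear_layer.weight[0, 1].item():.4f}*Radio'
      f' + {linear_layer.weight[0, 2].item():.4f}*Newspaper')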

@@ -0,0 +1,399 @@
mpg,cylinders,displacement,horsepower,weight,acceleration,model year,origin,car name
18,8,307,130,3504,12,70,1,chevrolet chevelle malibu
15,8,350,165,3693,11.5,70,1,buick skylark 320
18,8,318,150,3436,11,70,1,plymouth satellite
16,8,304,150,3433,12,70,1,amc rebel sst
17,8,302,140,3449,10.5,70,1,ford torino
15,8,429,198,4341,10,70,1,ford galaxie 500
14,8,454,220,4354,9,70,1,chevrolet impala
14,8,440,215,4312,8.5,70,1,plymouth fury iii
14,8,455,225,4425,10,70,1,pontiac catalina
15,8,390,190,3850,8.5,70,1,amc ambassador dpl
15,8,383,170,3563,10,70,1,dodge challenger se
14,8,340,160,3609,8,70,1,plymouth 'cuda 340
15,8,400,150,3761,9.5,70,1,chevrolet monte carlo
14,8,455,225,3086,10,70,1,buick estate wagon (sw)
24,4,113,95,2372,15,70,3,toyota corona mark ii
22,6,198,95,2833,15.5,70,1,plymouth duster
18,6,199,97,2774,15.5,70,1,amc hornet
21,6,200,85,2587,16,70,1,ford maverick
27,4,97,88,2130,14.5,70,3,datsun pl510
26,4,97,46,1835,20.5,70,2,volkswagen 1131 deluxe sedan
25,4,110,87,2672,17.5,70,2,peugeot 504
24,4,107,90,2430,14.5,70,2,audi 100 ls
25,4,104,95,2375,17.5,70,2,saab 99e
26,4,121,113,2234,12.5,70,2,bmw 2002
21,6,199,90,2648,15,70,1,amc gremlin
10,8,360,215,4615,14,70,1,ford f250
10,8,307,200,4376,15,70,1,chevy c20
11,8,318,210,4382,13.5,70,1,dodge d200
9,8,304,193,4732,18.5,70,1,hi 1200d
27,4,97,88,2130,14.5,71,3,datsun pl510
28,4,140,90,2264,15.5,71,1,chevrolet vega 2300
25,4,113,95,2228,14,71,3,toyota corona
25,4,98,?,2046,19,71,1,ford pinto
19,6,232,100,2634,13,71,1,amc gremlin
16,6,225,105,3439,15.5,71,1,plymouth satellite custom
17,6,250,100,3329,15.5,71,1,chevrolet chevelle malibu
19,6,250,88,3302,15.5,71,1,ford torino 500
18,6,232,100,3288,15.5,71,1,amc matador
14,8,350,165,4209,12,71,1,chevrolet impala
14,8,400,175,4464,11.5,71,1,pontiac catalina brougham
14,8,351,153,4154,13.5,71,1,ford galaxie 500
14,8,318,150,4096,13,71,1,plymouth fury iii
12,8,383,180,4955,11.5,71,1,dodge monaco (sw)
13,8,400,170,4746,12,71,1,ford country squire (sw)
13,8,400,175,5140,12,71,1,pontiac safari (sw)
18,6,258,110,2962,13.5,71,1,amc hornet sportabout (sw)
22,4,140,72,2408,19,71,1,chevrolet vega (sw)
19,6,250,100,3282,15,71,1,pontiac firebird
18,6,250,88,3139,14.5,71,1,ford mustang
23,4,122,86,2220,14,71,1,mercury capri 2000
28,4,116,90,2123,14,71,2,opel 1900
30,4,79,70,2074,19.5,71,2,peugeot 304
30,4,88,76,2065,14.5,71,2,fiat 124b
31,4,71,65,1773,19,71,3,toyota corolla 1200
35,4,72,69,1613,18,71,3,datsun 1200
27,4,97,60,1834,19,71,2,volkswagen model 111
26,4,91,70,1955,20.5,71,1,plymouth cricket
24,4,113,95,2278,15.5,72,3,toyota corona hardtop
25,4,97.5,80,2126,17,72,1,dodge colt hardtop
23,4,97,54,2254,23.5,72,2,volkswagen type 3
20,4,140,90,2408,19.5,72,1,chevrolet vega
21,4,122,86,2226,16.5,72,1,ford pinto runabout
13,8,350,165,4274,12,72,1,chevrolet impala
14,8,400,175,4385,12,72,1,pontiac catalina
15,8,318,150,4135,13.5,72,1,plymouth fury iii
14,8,351,153,4129,13,72,1,ford galaxie 500
17,8,304,150,3672,11.5,72,1,amc ambassador sst
11,8,429,208,4633,11,72,1,mercury marquis
13,8,350,155,4502,13.5,72,1,buick lesabre custom
12,8,350,160,4456,13.5,72,1,oldsmobile delta 88 royale
13,8,400,190,4422,12.5,72,1,chrysler newport royal
19,3,70,97,2330,13.5,72,3,mazda rx2 coupe
15,8,304,150,3892,12.5,72,1,amc matador (sw)
13,8,307,130,4098,14,72,1,chevrolet chevelle concours (sw)
13,8,302,140,4294,16,72,1,ford gran torino (sw)
14,8,318,150,4077,14,72,1,plymouth satellite custom (sw)
18,4,121,112,2933,14.5,72,2,volvo 145e (sw)
22,4,121,76,2511,18,72,2,volkswagen 411 (sw)
21,4,120,87,2979,19.5,72,2,peugeot 504 (sw)
26,4,96,69,2189,18,72,2,renault 12 (sw)
22,4,122,86,2395,16,72,1,ford pinto (sw)
28,4,97,92,2288,17,72,3,datsun 510 (sw)
23,4,120,97,2506,14.5,72,3,toyouta corona mark ii (sw)
28,4,98,80,2164,15,72,1,dodge colt (sw)
27,4,97,88,2100,16.5,72,3,toyota corolla 1600 (sw)
13,8,350,175,4100,13,73,1,buick century 350
14,8,304,150,3672,11.5,73,1,amc matador
13,8,350,145,3988,13,73,1,chevrolet malibu
14,8,302,137,4042,14.5,73,1,ford gran torino
15,8,318,150,3777,12.5,73,1,dodge coronet custom
12,8,429,198,4952,11.5,73,1,mercury marquis brougham
13,8,400,150,4464,12,73,1,chevrolet caprice classic
13,8,351,158,4363,13,73,1,ford ltd
14,8,318,150,4237,14.5,73,1,plymouth fury gran sedan
13,8,440,215,4735,11,73,1,chrysler new yorker brougham
12,8,455,225,4951,11,73,1,buick electra 225 custom
13,8,360,175,3821,11,73,1,amc ambassador brougham
18,6,225,105,3121,16.5,73,1,plymouth valiant
16,6,250,100,3278,18,73,1,chevrolet nova custom
18,6,232,100,2945,16,73,1,amc hornet
18,6,250,88,3021,16.5,73,1,ford maverick
23,6,198,95,2904,16,73,1,plymouth duster
26,4,97,46,1950,21,73,2,volkswagen super beetle
11,8,400,150,4997,14,73,1,chevrolet impala
12,8,400,167,4906,12.5,73,1,ford country
13,8,360,170,4654,13,73,1,plymouth custom suburb
12,8,350,180,4499,12.5,73,1,oldsmobile vista cruiser
18,6,232,100,2789,15,73,1,amc gremlin
20,4,97,88,2279,19,73,3,toyota carina
21,4,140,72,2401,19.5,73,1,chevrolet vega
22,4,108,94,2379,16.5,73,3,datsun 610
18,3,70,90,2124,13.5,73,3,maxda rx3
19,4,122,85,2310,18.5,73,1,ford pinto
21,6,155,107,2472,14,73,1,mercury capri v6
26,4,98,90,2265,15.5,73,2,fiat 124 sport coupe
15,8,350,145,4082,13,73,1,chevrolet monte carlo s
16,8,400,230,4278,9.5,73,1,pontiac grand prix
29,4,68,49,1867,19.5,73,2,fiat 128
24,4,116,75,2158,15.5,73,2,opel manta
20,4,114,91,2582,14,73,2,audi 100ls
19,4,121,112,2868,15.5,73,2,volvo 144ea
15,8,318,150,3399,11,73,1,dodge dart custom
24,4,121,110,2660,14,73,2,saab 99le
20,6,156,122,2807,13.5,73,3,toyota mark ii
11,8,350,180,3664,11,73,1,oldsmobile omega
20,6,198,95,3102,16.5,74,1,plymouth duster
21,6,200,?,2875,17,74,1,ford maverick
19,6,232,100,2901,16,74,1,amc hornet
15,6,250,100,3336,17,74,1,chevrolet nova
31,4,79,67,1950,19,74,3,datsun b210
26,4,122,80,2451,16.5,74,1,ford pinto
32,4,71,65,1836,21,74,3,toyota corolla 1200
25,4,140,75,2542,17,74,1,chevrolet vega
16,6,250,100,3781,17,74,1,chevrolet chevelle malibu classic
16,6,258,110,3632,18,74,1,amc matador
18,6,225,105,3613,16.5,74,1,plymouth satellite sebring
16,8,302,140,4141,14,74,1,ford gran torino
13,8,350,150,4699,14.5,74,1,buick century luxus (sw)
14,8,318,150,4457,13.5,74,1,dodge coronet custom (sw)
14,8,302,140,4638,16,74,1,ford gran torino (sw)
14,8,304,150,4257,15.5,74,1,amc matador (sw)
29,4,98,83,2219,16.5,74,2,audi fox
26,4,79,67,1963,15.5,74,2,volkswagen dasher
26,4,97,78,2300,14.5,74,2,opel manta
31,4,76,52,1649,16.5,74,3,toyota corona
32,4,83,61,2003,19,74,3,datsun 710
28,4,90,75,2125,14.5,74,1,dodge colt
24,4,90,75,2108,15.5,74,2,fiat 128
26,4,116,75,2246,14,74,2,fiat 124 tc
24,4,120,97,2489,15,74,3,honda civic
26,4,108,93,2391,15.5,74,3,subaru
31,4,79,67,2000,16,74,2,fiat x1.9
19,6,225,95,3264,16,75,1,plymouth valiant custom
18,6,250,105,3459,16,75,1,chevrolet nova
15,6,250,72,3432,21,75,1,mercury monarch
15,6,250,72,3158,19.5,75,1,ford maverick
16,8,400,170,4668,11.5,75,1,pontiac catalina
15,8,350,145,4440,14,75,1,chevrolet bel air
16,8,318,150,4498,14.5,75,1,plymouth grand fury
14,8,351,148,4657,13.5,75,1,ford ltd
17,6,231,110,3907,21,75,1,buick century
16,6,250,105,3897,18.5,75,1,chevroelt chevelle malibu
15,6,258,110,3730,19,75,1,amc matador
18,6,225,95,3785,19,75,1,plymouth fury
21,6,231,110,3039,15,75,1,buick skyhawk
20,8,262,110,3221,13.5,75,1,chevrolet monza 2+2
13,8,302,129,3169,12,75,1,ford mustang ii
29,4,97,75,2171,16,75,3,toyota corolla
23,4,140,83,2639,17,75,1,ford pinto
20,6,232,100,2914,16,75,1,amc gremlin
23,4,140,78,2592,18.5,75,1,pontiac astro
24,4,134,96,2702,13.5,75,3,toyota corona
25,4,90,71,2223,16.5,75,2,volkswagen dasher
24,4,119,97,2545,17,75,3,datsun 710
18,6,171,97,2984,14.5,75,1,ford pinto
29,4,90,70,1937,14,75,2,volkswagen rabbit
19,6,232,90,3211,17,75,1,amc pacer
23,4,115,95,2694,15,75,2,audi 100ls
23,4,120,88,2957,17,75,2,peugeot 504
22,4,121,98,2945,14.5,75,2,volvo 244dl
25,4,121,115,2671,13.5,75,2,saab 99le
33,4,91,53,1795,17.5,75,3,honda civic cvcc
28,4,107,86,2464,15.5,76,2,fiat 131
25,4,116,81,2220,16.9,76,2,opel 1900
25,4,140,92,2572,14.9,76,1,capri ii
26,4,98,79,2255,17.7,76,1,dodge colt
27,4,101,83,2202,15.3,76,2,renault 12tl
17.5,8,305,140,4215,13,76,1,chevrolet chevelle malibu classic
16,8,318,150,4190,13,76,1,dodge coronet brougham
15.5,8,304,120,3962,13.9,76,1,amc matador
14.5,8,351,152,4215,12.8,76,1,ford gran torino
22,6,225,100,3233,15.4,76,1,plymouth valiant
22,6,250,105,3353,14.5,76,1,chevrolet nova
24,6,200,81,3012,17.6,76,1,ford maverick
22.5,6,232,90,3085,17.6,76,1,amc hornet
29,4,85,52,2035,22.2,76,1,chevrolet chevette
24.5,4,98,60,2164,22.1,76,1,chevrolet woody
29,4,90,70,1937,14.2,76,2,vw rabbit
33,4,91,53,1795,17.4,76,3,honda civic
20,6,225,100,3651,17.7,76,1,dodge aspen se
18,6,250,78,3574,21,76,1,ford granada ghia
18.5,6,250,110,3645,16.2,76,1,pontiac ventura sj
17.5,6,258,95,3193,17.8,76,1,amc pacer d/l
29.5,4,97,71,1825,12.2,76,2,volkswagen rabbit
32,4,85,70,1990,17,76,3,datsun b-210
28,4,97,75,2155,16.4,76,3,toyota corolla
26.5,4,140,72,2565,13.6,76,1,ford pinto
20,4,130,102,3150,15.7,76,2,volvo 245
13,8,318,150,3940,13.2,76,1,plymouth volare premier v8
19,4,120,88,3270,21.9,76,2,peugeot 504
19,6,156,108,2930,15.5,76,3,toyota mark ii
16.5,6,168,120,3820,16.7,76,2,mercedes-benz 280s
16.5,8,350,180,4380,12.1,76,1,cadillac seville
13,8,350,145,4055,12,76,1,chevy c10
13,8,302,130,3870,15,76,1,ford f108
13,8,318,150,3755,14,76,1,dodge d100
31.5,4,98,68,2045,18.5,77,3,honda accord cvcc
30,4,111,80,2155,14.8,77,1,buick opel isuzu deluxe
36,4,79,58,1825,18.6,77,2,renault 5 gtl
25.5,4,122,96,2300,15.5,77,1,plymouth arrow gs
33.5,4,85,70,1945,16.8,77,3,datsun f-10 hatchback
17.5,8,305,145,3880,12.5,77,1,chevrolet caprice classic
17,8,260,110,4060,19,77,1,oldsmobile cutlass supreme
15.5,8,318,145,4140,13.7,77,1,dodge monaco brougham
15,8,302,130,4295,14.9,77,1,mercury cougar brougham
17.5,6,250,110,3520,16.4,77,1,chevrolet concours
20.5,6,231,105,3425,16.9,77,1,buick skylark
19,6,225,100,3630,17.7,77,1,plymouth volare custom
18.5,6,250,98,3525,19,77,1,ford granada
16,8,400,180,4220,11.1,77,1,pontiac grand prix lj
15.5,8,350,170,4165,11.4,77,1,chevrolet monte carlo landau
15.5,8,400,190,4325,12.2,77,1,chrysler cordoba
16,8,351,149,4335,14.5,77,1,ford thunderbird
29,4,97,78,1940,14.5,77,2,volkswagen rabbit custom
24.5,4,151,88,2740,16,77,1,pontiac sunbird coupe
26,4,97,75,2265,18.2,77,3,toyota corolla liftback
25.5,4,140,89,2755,15.8,77,1,ford mustang ii 2+2
30.5,4,98,63,2051,17,77,1,chevrolet chevette
33.5,4,98,83,2075,15.9,77,1,dodge colt m/m
30,4,97,67,1985,16.4,77,3,subaru dl
30.5,4,97,78,2190,14.1,77,2,volkswagen dasher
22,6,146,97,2815,14.5,77,3,datsun 810
21.5,4,121,110,2600,12.8,77,2,bmw 320i
21.5,3,80,110,2720,13.5,77,3,mazda rx-4
43.1,4,90,48,1985,21.5,78,2,volkswagen rabbit custom diesel
36.1,4,98,66,1800,14.4,78,1,ford fiesta
32.8,4,78,52,1985,19.4,78,3,mazda glc deluxe
39.4,4,85,70,2070,18.6,78,3,datsun b210 gx
36.1,4,91,60,1800,16.4,78,3,honda civic cvcc
19.9,8,260,110,3365,15.5,78,1,oldsmobile cutlass salon brougham
19.4,8,318,140,3735,13.2,78,1,dodge diplomat
20.2,8,302,139,3570,12.8,78,1,mercury monarch ghia
19.2,6,231,105,3535,19.2,78,1,pontiac phoenix lj
20.5,6,200,95,3155,18.2,78,1,chevrolet malibu
20.2,6,200,85,2965,15.8,78,1,ford fairmont (auto)
25.1,4,140,88,2720,15.4,78,1,ford fairmont (man)
20.5,6,225,100,3430,17.2,78,1,plymouth volare
19.4,6,232,90,3210,17.2,78,1,amc concord
20.6,6,231,105,3380,15.8,78,1,buick century special
20.8,6,200,85,3070,16.7,78,1,mercury zephyr
18.6,6,225,110,3620,18.7,78,1,dodge aspen
18.1,6,258,120,3410,15.1,78,1,amc concord d/l
19.2,8,305,145,3425,13.2,78,1,chevrolet monte carlo landau
17.7,6,231,165,3445,13.4,78,1,buick regal sport coupe (turbo)
18.1,8,302,139,3205,11.2,78,1,ford futura
17.5,8,318,140,4080,13.7,78,1,dodge magnum xe
30,4,98,68,2155,16.5,78,1,chevrolet chevette
27.5,4,134,95,2560,14.2,78,3,toyota corona
27.2,4,119,97,2300,14.7,78,3,datsun 510
30.9,4,105,75,2230,14.5,78,1,dodge omni
21.1,4,134,95,2515,14.8,78,3,toyota celica gt liftback
23.2,4,156,105,2745,16.7,78,1,plymouth sapporo
23.8,4,151,85,2855,17.6,78,1,oldsmobile starfire sx
23.9,4,119,97,2405,14.9,78,3,datsun 200-sx
20.3,5,131,103,2830,15.9,78,2,audi 5000
17,6,163,125,3140,13.6,78,2,volvo 264gl
21.6,4,121,115,2795,15.7,78,2,saab 99gle
16.2,6,163,133,3410,15.8,78,2,peugeot 604sl
31.5,4,89,71,1990,14.9,78,2,volkswagen scirocco
29.5,4,98,68,2135,16.6,78,3,honda accord lx
21.5,6,231,115,3245,15.4,79,1,pontiac lemans v6
19.8,6,200,85,2990,18.2,79,1,mercury zephyr 6
22.3,4,140,88,2890,17.3,79,1,ford fairmont 4
20.2,6,232,90,3265,18.2,79,1,amc concord dl 6
20.6,6,225,110,3360,16.6,79,1,dodge aspen 6
17,8,305,130,3840,15.4,79,1,chevrolet caprice classic
17.6,8,302,129,3725,13.4,79,1,ford ltd landau
16.5,8,351,138,3955,13.2,79,1,mercury grand marquis
18.2,8,318,135,3830,15.2,79,1,dodge st. regis
16.9,8,350,155,4360,14.9,79,1,buick estate wagon (sw)
15.5,8,351,142,4054,14.3,79,1,ford country squire (sw)
19.2,8,267,125,3605,15,79,1,chevrolet malibu classic (sw)
18.5,8,360,150,3940,13,79,1,chrysler lebaron town @ country (sw)
31.9,4,89,71,1925,14,79,2,vw rabbit custom
34.1,4,86,65,1975,15.2,79,3,maxda glc deluxe
35.7,4,98,80,1915,14.4,79,1,dodge colt hatchback custom
27.4,4,121,80,2670,15,79,1,amc spirit dl
25.4,5,183,77,3530,20.1,79,2,mercedes benz 300d
23,8,350,125,3900,17.4,79,1,cadillac eldorado
27.2,4,141,71,3190,24.8,79,2,peugeot 504
23.9,8,260,90,3420,22.2,79,1,oldsmobile cutlass salon brougham
34.2,4,105,70,2200,13.2,79,1,plymouth horizon
34.5,4,105,70,2150,14.9,79,1,plymouth horizon tc3
31.8,4,85,65,2020,19.2,79,3,datsun 210
37.3,4,91,69,2130,14.7,79,2,fiat strada custom
28.4,4,151,90,2670,16,79,1,buick skylark limited
28.8,6,173,115,2595,11.3,79,1,chevrolet citation
26.8,6,173,115,2700,12.9,79,1,oldsmobile omega brougham
33.5,4,151,90,2556,13.2,79,1,pontiac phoenix
41.5,4,98,76,2144,14.7,80,2,vw rabbit
38.1,4,89,60,1968,18.8,80,3,toyota corolla tercel
32.1,4,98,70,2120,15.5,80,1,chevrolet chevette
37.2,4,86,65,2019,16.4,80,3,datsun 310
28,4,151,90,2678,16.5,80,1,chevrolet citation
26.4,4,140,88,2870,18.1,80,1,ford fairmont
24.3,4,151,90,3003,20.1,80,1,amc concord
19.1,6,225,90,3381,18.7,80,1,dodge aspen
34.3,4,97,78,2188,15.8,80,2,audi 4000
29.8,4,134,90,2711,15.5,80,3,toyota corona liftback
31.3,4,120,75,2542,17.5,80,3,mazda 626
37,4,119,92,2434,15,80,3,datsun 510 hatchback
32.2,4,108,75,2265,15.2,80,3,toyota corolla
46.6,4,86,65,2110,17.9,80,3,mazda glc
27.9,4,156,105,2800,14.4,80,1,dodge colt
40.8,4,85,65,2110,19.2,80,3,datsun 210
44.3,4,90,48,2085,21.7,80,2,vw rabbit c (diesel)
43.4,4,90,48,2335,23.7,80,2,vw dasher (diesel)
36.4,5,121,67,2950,19.9,80,2,audi 5000s (diesel)
30,4,146,67,3250,21.8,80,2,mercedes-benz 240d
44.6,4,91,67,1850,13.8,80,3,honda civic 1500 gl
40.9,4,85,?,1835,17.3,80,2,renault lecar deluxe
33.8,4,97,67,2145,18,80,3,subaru dl
29.8,4,89,62,1845,15.3,80,2,vokswagen rabbit
32.7,6,168,132,2910,11.4,80,3,datsun 280-zx
23.7,3,70,100,2420,12.5,80,3,mazda rx-7 gs
35,4,122,88,2500,15.1,80,2,triumph tr7 coupe
23.6,4,140,?,2905,14.3,80,1,ford mustang cobra
32.4,4,107,72,2290,17,80,3,honda accord
27.2,4,135,84,2490,15.7,81,1,plymouth reliant
26.6,4,151,84,2635,16.4,81,1,buick skylark
25.8,4,156,92,2620,14.4,81,1,dodge aries wagon (sw)
23.5,6,173,110,2725,12.6,81,1,chevrolet citation
30,4,135,84,2385,12.9,81,1,plymouth reliant
39.1,4,79,58,1755,16.9,81,3,toyota starlet
39,4,86,64,1875,16.4,81,1,plymouth champ
35.1,4,81,60,1760,16.1,81,3,honda civic 1300
32.3,4,97,67,2065,17.8,81,3,subaru
37,4,85,65,1975,19.4,81,3,datsun 210 mpg
37.7,4,89,62,2050,17.3,81,3,toyota tercel
34.1,4,91,68,1985,16,81,3,mazda glc 4
34.7,4,105,63,2215,14.9,81,1,plymouth horizon 4
34.4,4,98,65,2045,16.2,81,1,ford escort 4w
29.9,4,98,65,2380,20.7,81,1,ford escort 2h
33,4,105,74,2190,14.2,81,2,volkswagen jetta
34.5,4,100,?,2320,15.8,81,2,renault 18i
33.7,4,107,75,2210,14.4,81,3,honda prelude
32.4,4,108,75,2350,16.8,81,3,toyota corolla
32.9,4,119,100,2615,14.8,81,3,datsun 200sx
31.6,4,120,74,2635,18.3,81,3,mazda 626
28.1,4,141,80,3230,20.4,81,2,peugeot 505s turbo diesel
30.7,6,145,76,3160,19.6,81,2,volvo diesel
25.4,6,168,116,2900,12.6,81,3,toyota cressida
24.2,6,146,120,2930,13.8,81,3,datsun 810 maxima
22.4,6,231,110,3415,15.8,81,1,buick century
26.6,8,350,105,3725,19,81,1,oldsmobile cutlass ls
20.2,6,200,88,3060,17.1,81,1,ford granada gl
17.6,6,225,85,3465,16.6,81,1,chrysler lebaron salon
28,4,112,88,2605,19.6,82,1,chevrolet cavalier
27,4,112,88,2640,18.6,82,1,chevrolet cavalier wagon
34,4,112,88,2395,18,82,1,chevrolet cavalier 2-door
31,4,112,85,2575,16.2,82,1,pontiac j2000 se hatchback
29,4,135,84,2525,16,82,1,dodge aries se
27,4,151,90,2735,18,82,1,pontiac phoenix
24,4,140,92,2865,16.4,82,1,ford fairmont futura
23,4,151,?,3035,20.5,82,1,amc concord dl
36,4,105,74,1980,15.3,82,2,volkswagen rabbit l
37,4,91,68,2025,18.2,82,3,mazda glc custom l
31,4,91,68,1970,17.6,82,3,mazda glc custom
38,4,105,63,2125,14.7,82,1,plymouth horizon miser
36,4,98,70,2125,17.3,82,1,mercury lynx l
36,4,120,88,2160,14.5,82,3,nissan stanza xe
36,4,107,75,2205,14.5,82,3,honda accord
34,4,108,70,2245,16.9,82,3,toyota corolla
38,4,91,67,1965,15,82,3,honda civic
32,4,91,67,1965,15.7,82,3,honda civic (auto)
38,4,91,67,1995,16.2,82,3,datsun 310 gx
25,6,181,110,2945,16.4,82,1,buick century limited
38,6,262,85,3015,17,82,1,oldsmobile cutlass ciera (diesel)
26,4,156,92,2585,14.5,82,1,chrysler lebaron medallion
22,6,232,112,2835,14.7,82,1,ford granada l
32,4,144,96,2665,13.9,82,3,toyota celica gt
36,4,135,84,2370,13,82,1,dodge charger 2.2
27,4,151,90,2950,17.3,82,1,chevrolet camaro
27,4,140,86,2790,15.6,82,1,ford mustang gl
44,4,97,52,2130,24.6,82,2,vw pickup
32,4,135,84,2295,11.6,82,1,dodge rampage
28,4,120,79,2625,18.6,82,1,ford ranger
31,4,119,82,2720,19.4,82,1,chevy s-10

View File

@ -0,0 +1,72 @@
import pandas as pd
import torch
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_absolute_error
import numpy as np

df = pd.read_csv("./RegressionModels/AutoMPG/auto-mpg.csv")
df = df.dropna()  # drop rows with missing values
df = df[df['horsepower'] != '?']  # drop rows where horsepower is unknown
df['horsepower'] = df['horsepower'].astype(int)  # convert object column to int
# print(df.dtypes)
# print(df.iloc[:, 1:8])

# Split features (columns 1-7) and target (mpg) into train and test sets
X_train, X_test, Y_train, Y_test = train_test_split(df.iloc[:, 1:8], df["mpg"], test_size=0.2, random_state=42)

# Standardize features and target; fit the scalers on the training data only
scaler = StandardScaler()
X_scaled = scaler.fit_transform(X_train.values)
X = torch.tensor(X_scaled, dtype=torch.float32)
scaler_Y = StandardScaler()
Y_scaled = scaler_Y.fit_transform(Y_train.values.reshape(-1, 1))
Y = torch.tensor(Y_scaled, dtype=torch.float32)
# X = torch.tensor(df.iloc[:, 1:8].values, dtype=torch.float32)
# Y = torch.tensor(df["mpg"].values, dtype=torch.float32)

model = torch.nn.Sequential(  # increase complexity with more neural network layers!
    torch.nn.Linear(7, 10),
    torch.nn.ReLU(),
    torch.nn.Linear(10, 1)
)
loss_fn = torch.nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)

for epoch in range(3000):
    pred_y = model(X)
    loss = loss_fn(pred_y, Y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    if epoch % 999 == 0:  # log roughly every thousand epochs
        print(f"Epoch: {epoch}, Loss: {loss.item():.2f}")

# Scale test features using the same scaler as training
X_test_scaled = scaler.transform(X_test.values)
X_test_tensor = torch.tensor(X_test_scaled, dtype=torch.float32)

# Get predictions (still in scaled space)
with torch.no_grad():
    pred_Y_scaled = model(X_test_tensor).numpy()

# Inverse-transform predictions back to the original mpg scale
Y_test_array = Y_test.values.reshape(-1, 1)  # shape (n, 1)
pred_Y = scaler_Y.inverse_transform(pred_Y_scaled)  # shape (n, 1)

# Flatten to 1D for metrics
Y_test_unscaled_flat = Y_test_array.ravel()
pred_Y_flat = pred_Y.ravel()

mae = mean_absolute_error(Y_test_unscaled_flat, pred_Y_flat)
mape = np.mean(np.abs((Y_test_unscaled_flat - pred_Y_flat) / Y_test_unscaled_flat)) * 100
print(f"Test MAE: {mae:.2f}")
print(f"Test MAPE: {mape:.2f}%")

View File

@ -0,0 +1,507 @@
"","crim","zn","indus","chas","nox","rm","age","dis","rad","tax","ptratio","black","lstat","medv"
"1",0.00632,18,2.31,0,0.538,6.575,65.2,4.09,1,296,15.3,396.9,4.98,24
"2",0.02731,0,7.07,0,0.469,6.421,78.9,4.9671,2,242,17.8,396.9,9.14,21.6
"3",0.02729,0,7.07,0,0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03,34.7
"4",0.03237,0,2.18,0,0.458,6.998,45.8,6.0622,3,222,18.7,394.63,2.94,33.4
"5",0.06905,0,2.18,0,0.458,7.147,54.2,6.0622,3,222,18.7,396.9,5.33,36.2
"6",0.02985,0,2.18,0,0.458,6.43,58.7,6.0622,3,222,18.7,394.12,5.21,28.7
"7",0.08829,12.5,7.87,0,0.524,6.012,66.6,5.5605,5,311,15.2,395.6,12.43,22.9
"8",0.14455,12.5,7.87,0,0.524,6.172,96.1,5.9505,5,311,15.2,396.9,19.15,27.1
"9",0.21124,12.5,7.87,0,0.524,5.631,100,6.0821,5,311,15.2,386.63,29.93,16.5
"10",0.17004,12.5,7.87,0,0.524,6.004,85.9,6.5921,5,311,15.2,386.71,17.1,18.9
"11",0.22489,12.5,7.87,0,0.524,6.377,94.3,6.3467,5,311,15.2,392.52,20.45,15
"12",0.11747,12.5,7.87,0,0.524,6.009,82.9,6.2267,5,311,15.2,396.9,13.27,18.9
"13",0.09378,12.5,7.87,0,0.524,5.889,39,5.4509,5,311,15.2,390.5,15.71,21.7
"14",0.62976,0,8.14,0,0.538,5.949,61.8,4.7075,4,307,21,396.9,8.26,20.4
"15",0.63796,0,8.14,0,0.538,6.096,84.5,4.4619,4,307,21,380.02,10.26,18.2
"16",0.62739,0,8.14,0,0.538,5.834,56.5,4.4986,4,307,21,395.62,8.47,19.9
"17",1.05393,0,8.14,0,0.538,5.935,29.3,4.4986,4,307,21,386.85,6.58,23.1
"18",0.7842,0,8.14,0,0.538,5.99,81.7,4.2579,4,307,21,386.75,14.67,17.5
"19",0.80271,0,8.14,0,0.538,5.456,36.6,3.7965,4,307,21,288.99,11.69,20.2
"20",0.7258,0,8.14,0,0.538,5.727,69.5,3.7965,4,307,21,390.95,11.28,18.2
"21",1.25179,0,8.14,0,0.538,5.57,98.1,3.7979,4,307,21,376.57,21.02,13.6
"22",0.85204,0,8.14,0,0.538,5.965,89.2,4.0123,4,307,21,392.53,13.83,19.6
"23",1.23247,0,8.14,0,0.538,6.142,91.7,3.9769,4,307,21,396.9,18.72,15.2
"24",0.98843,0,8.14,0,0.538,5.813,100,4.0952,4,307,21,394.54,19.88,14.5
"25",0.75026,0,8.14,0,0.538,5.924,94.1,4.3996,4,307,21,394.33,16.3,15.6
"26",0.84054,0,8.14,0,0.538,5.599,85.7,4.4546,4,307,21,303.42,16.51,13.9
"27",0.67191,0,8.14,0,0.538,5.813,90.3,4.682,4,307,21,376.88,14.81,16.6
"28",0.95577,0,8.14,0,0.538,6.047,88.8,4.4534,4,307,21,306.38,17.28,14.8
"29",0.77299,0,8.14,0,0.538,6.495,94.4,4.4547,4,307,21,387.94,12.8,18.4
"30",1.00245,0,8.14,0,0.538,6.674,87.3,4.239,4,307,21,380.23,11.98,21
"31",1.13081,0,8.14,0,0.538,5.713,94.1,4.233,4,307,21,360.17,22.6,12.7
"32",1.35472,0,8.14,0,0.538,6.072,100,4.175,4,307,21,376.73,13.04,14.5
"33",1.38799,0,8.14,0,0.538,5.95,82,3.99,4,307,21,232.6,27.71,13.2
"34",1.15172,0,8.14,0,0.538,5.701,95,3.7872,4,307,21,358.77,18.35,13.1
"35",1.61282,0,8.14,0,0.538,6.096,96.9,3.7598,4,307,21,248.31,20.34,13.5
"36",0.06417,0,5.96,0,0.499,5.933,68.2,3.3603,5,279,19.2,396.9,9.68,18.9
"37",0.09744,0,5.96,0,0.499,5.841,61.4,3.3779,5,279,19.2,377.56,11.41,20
"38",0.08014,0,5.96,0,0.499,5.85,41.5,3.9342,5,279,19.2,396.9,8.77,21
"39",0.17505,0,5.96,0,0.499,5.966,30.2,3.8473,5,279,19.2,393.43,10.13,24.7
"40",0.02763,75,2.95,0,0.428,6.595,21.8,5.4011,3,252,18.3,395.63,4.32,30.8
"41",0.03359,75,2.95,0,0.428,7.024,15.8,5.4011,3,252,18.3,395.62,1.98,34.9
"42",0.12744,0,6.91,0,0.448,6.77,2.9,5.7209,3,233,17.9,385.41,4.84,26.6
"43",0.1415,0,6.91,0,0.448,6.169,6.6,5.7209,3,233,17.9,383.37,5.81,25.3
"44",0.15936,0,6.91,0,0.448,6.211,6.5,5.7209,3,233,17.9,394.46,7.44,24.7
"45",0.12269,0,6.91,0,0.448,6.069,40,5.7209,3,233,17.9,389.39,9.55,21.2
"46",0.17142,0,6.91,0,0.448,5.682,33.8,5.1004,3,233,17.9,396.9,10.21,19.3
"47",0.18836,0,6.91,0,0.448,5.786,33.3,5.1004,3,233,17.9,396.9,14.15,20
"48",0.22927,0,6.91,0,0.448,6.03,85.5,5.6894,3,233,17.9,392.74,18.8,16.6
"49",0.25387,0,6.91,0,0.448,5.399,95.3,5.87,3,233,17.9,396.9,30.81,14.4
"50",0.21977,0,6.91,0,0.448,5.602,62,6.0877,3,233,17.9,396.9,16.2,19.4
"51",0.08873,21,5.64,0,0.439,5.963,45.7,6.8147,4,243,16.8,395.56,13.45,19.7
"52",0.04337,21,5.64,0,0.439,6.115,63,6.8147,4,243,16.8,393.97,9.43,20.5
"53",0.0536,21,5.64,0,0.439,6.511,21.1,6.8147,4,243,16.8,396.9,5.28,25
"54",0.04981,21,5.64,0,0.439,5.998,21.4,6.8147,4,243,16.8,396.9,8.43,23.4
"55",0.0136,75,4,0,0.41,5.888,47.6,7.3197,3,469,21.1,396.9,14.8,18.9
"56",0.01311,90,1.22,0,0.403,7.249,21.9,8.6966,5,226,17.9,395.93,4.81,35.4
"57",0.02055,85,0.74,0,0.41,6.383,35.7,9.1876,2,313,17.3,396.9,5.77,24.7
"58",0.01432,100,1.32,0,0.411,6.816,40.5,8.3248,5,256,15.1,392.9,3.95,31.6
"59",0.15445,25,5.13,0,0.453,6.145,29.2,7.8148,8,284,19.7,390.68,6.86,23.3
"60",0.10328,25,5.13,0,0.453,5.927,47.2,6.932,8,284,19.7,396.9,9.22,19.6
"61",0.14932,25,5.13,0,0.453,5.741,66.2,7.2254,8,284,19.7,395.11,13.15,18.7
"62",0.17171,25,5.13,0,0.453,5.966,93.4,6.8185,8,284,19.7,378.08,14.44,16
"63",0.11027,25,5.13,0,0.453,6.456,67.8,7.2255,8,284,19.7,396.9,6.73,22.2
"64",0.1265,25,5.13,0,0.453,6.762,43.4,7.9809,8,284,19.7,395.58,9.5,25
"65",0.01951,17.5,1.38,0,0.4161,7.104,59.5,9.2229,3,216,18.6,393.24,8.05,33
"66",0.03584,80,3.37,0,0.398,6.29,17.8,6.6115,4,337,16.1,396.9,4.67,23.5
"67",0.04379,80,3.37,0,0.398,5.787,31.1,6.6115,4,337,16.1,396.9,10.24,19.4
"68",0.05789,12.5,6.07,0,0.409,5.878,21.4,6.498,4,345,18.9,396.21,8.1,22
"69",0.13554,12.5,6.07,0,0.409,5.594,36.8,6.498,4,345,18.9,396.9,13.09,17.4
"70",0.12816,12.5,6.07,0,0.409,5.885,33,6.498,4,345,18.9,396.9,8.79,20.9
"71",0.08826,0,10.81,0,0.413,6.417,6.6,5.2873,4,305,19.2,383.73,6.72,24.2
"72",0.15876,0,10.81,0,0.413,5.961,17.5,5.2873,4,305,19.2,376.94,9.88,21.7
"73",0.09164,0,10.81,0,0.413,6.065,7.8,5.2873,4,305,19.2,390.91,5.52,22.8
"74",0.19539,0,10.81,0,0.413,6.245,6.2,5.2873,4,305,19.2,377.17,7.54,23.4
"75",0.07896,0,12.83,0,0.437,6.273,6,4.2515,5,398,18.7,394.92,6.78,24.1
"76",0.09512,0,12.83,0,0.437,6.286,45,4.5026,5,398,18.7,383.23,8.94,21.4
"77",0.10153,0,12.83,0,0.437,6.279,74.5,4.0522,5,398,18.7,373.66,11.97,20
"78",0.08707,0,12.83,0,0.437,6.14,45.8,4.0905,5,398,18.7,386.96,10.27,20.8
"79",0.05646,0,12.83,0,0.437,6.232,53.7,5.0141,5,398,18.7,386.4,12.34,21.2
"80",0.08387,0,12.83,0,0.437,5.874,36.6,4.5026,5,398,18.7,396.06,9.1,20.3
"81",0.04113,25,4.86,0,0.426,6.727,33.5,5.4007,4,281,19,396.9,5.29,28
"82",0.04462,25,4.86,0,0.426,6.619,70.4,5.4007,4,281,19,395.63,7.22,23.9
"83",0.03659,25,4.86,0,0.426,6.302,32.2,5.4007,4,281,19,396.9,6.72,24.8
"84",0.03551,25,4.86,0,0.426,6.167,46.7,5.4007,4,281,19,390.64,7.51,22.9
"85",0.05059,0,4.49,0,0.449,6.389,48,4.7794,3,247,18.5,396.9,9.62,23.9
"86",0.05735,0,4.49,0,0.449,6.63,56.1,4.4377,3,247,18.5,392.3,6.53,26.6
"87",0.05188,0,4.49,0,0.449,6.015,45.1,4.4272,3,247,18.5,395.99,12.86,22.5
"88",0.07151,0,4.49,0,0.449,6.121,56.8,3.7476,3,247,18.5,395.15,8.44,22.2
"89",0.0566,0,3.41,0,0.489,7.007,86.3,3.4217,2,270,17.8,396.9,5.5,23.6
"90",0.05302,0,3.41,0,0.489,7.079,63.1,3.4145,2,270,17.8,396.06,5.7,28.7
"91",0.04684,0,3.41,0,0.489,6.417,66.1,3.0923,2,270,17.8,392.18,8.81,22.6
"92",0.03932,0,3.41,0,0.489,6.405,73.9,3.0921,2,270,17.8,393.55,8.2,22
"93",0.04203,28,15.04,0,0.464,6.442,53.6,3.6659,4,270,18.2,395.01,8.16,22.9
"94",0.02875,28,15.04,0,0.464,6.211,28.9,3.6659,4,270,18.2,396.33,6.21,25
"95",0.04294,28,15.04,0,0.464,6.249,77.3,3.615,4,270,18.2,396.9,10.59,20.6
"96",0.12204,0,2.89,0,0.445,6.625,57.8,3.4952,2,276,18,357.98,6.65,28.4
"97",0.11504,0,2.89,0,0.445,6.163,69.6,3.4952,2,276,18,391.83,11.34,21.4
"98",0.12083,0,2.89,0,0.445,8.069,76,3.4952,2,276,18,396.9,4.21,38.7
"99",0.08187,0,2.89,0,0.445,7.82,36.9,3.4952,2,276,18,393.53,3.57,43.8
"100",0.0686,0,2.89,0,0.445,7.416,62.5,3.4952,2,276,18,396.9,6.19,33.2
"101",0.14866,0,8.56,0,0.52,6.727,79.9,2.7778,5,384,20.9,394.76,9.42,27.5
"102",0.11432,0,8.56,0,0.52,6.781,71.3,2.8561,5,384,20.9,395.58,7.67,26.5
"103",0.22876,0,8.56,0,0.52,6.405,85.4,2.7147,5,384,20.9,70.8,10.63,18.6
"104",0.21161,0,8.56,0,0.52,6.137,87.4,2.7147,5,384,20.9,394.47,13.44,19.3
"105",0.1396,0,8.56,0,0.52,6.167,90,2.421,5,384,20.9,392.69,12.33,20.1
"106",0.13262,0,8.56,0,0.52,5.851,96.7,2.1069,5,384,20.9,394.05,16.47,19.5
"107",0.1712,0,8.56,0,0.52,5.836,91.9,2.211,5,384,20.9,395.67,18.66,19.5
"108",0.13117,0,8.56,0,0.52,6.127,85.2,2.1224,5,384,20.9,387.69,14.09,20.4
"109",0.12802,0,8.56,0,0.52,6.474,97.1,2.4329,5,384,20.9,395.24,12.27,19.8
"110",0.26363,0,8.56,0,0.52,6.229,91.2,2.5451,5,384,20.9,391.23,15.55,19.4
"111",0.10793,0,8.56,0,0.52,6.195,54.4,2.7778,5,384,20.9,393.49,13,21.7
"112",0.10084,0,10.01,0,0.547,6.715,81.6,2.6775,6,432,17.8,395.59,10.16,22.8
"113",0.12329,0,10.01,0,0.547,5.913,92.9,2.3534,6,432,17.8,394.95,16.21,18.8
"114",0.22212,0,10.01,0,0.547,6.092,95.4,2.548,6,432,17.8,396.9,17.09,18.7
"115",0.14231,0,10.01,0,0.547,6.254,84.2,2.2565,6,432,17.8,388.74,10.45,18.5
"116",0.17134,0,10.01,0,0.547,5.928,88.2,2.4631,6,432,17.8,344.91,15.76,18.3
"117",0.13158,0,10.01,0,0.547,6.176,72.5,2.7301,6,432,17.8,393.3,12.04,21.2
"118",0.15098,0,10.01,0,0.547,6.021,82.6,2.7474,6,432,17.8,394.51,10.3,19.2
"119",0.13058,0,10.01,0,0.547,5.872,73.1,2.4775,6,432,17.8,338.63,15.37,20.4
"120",0.14476,0,10.01,0,0.547,5.731,65.2,2.7592,6,432,17.8,391.5,13.61,19.3
"121",0.06899,0,25.65,0,0.581,5.87,69.7,2.2577,2,188,19.1,389.15,14.37,22
"122",0.07165,0,25.65,0,0.581,6.004,84.1,2.1974,2,188,19.1,377.67,14.27,20.3
"123",0.09299,0,25.65,0,0.581,5.961,92.9,2.0869,2,188,19.1,378.09,17.93,20.5
"124",0.15038,0,25.65,0,0.581,5.856,97,1.9444,2,188,19.1,370.31,25.41,17.3
"125",0.09849,0,25.65,0,0.581,5.879,95.8,2.0063,2,188,19.1,379.38,17.58,18.8
"126",0.16902,0,25.65,0,0.581,5.986,88.4,1.9929,2,188,19.1,385.02,14.81,21.4
"127",0.38735,0,25.65,0,0.581,5.613,95.6,1.7572,2,188,19.1,359.29,27.26,15.7
"128",0.25915,0,21.89,0,0.624,5.693,96,1.7883,4,437,21.2,392.11,17.19,16.2
"129",0.32543,0,21.89,0,0.624,6.431,98.8,1.8125,4,437,21.2,396.9,15.39,18
"130",0.88125,0,21.89,0,0.624,5.637,94.7,1.9799,4,437,21.2,396.9,18.34,14.3
"131",0.34006,0,21.89,0,0.624,6.458,98.9,2.1185,4,437,21.2,395.04,12.6,19.2
"132",1.19294,0,21.89,0,0.624,6.326,97.7,2.271,4,437,21.2,396.9,12.26,19.6
"133",0.59005,0,21.89,0,0.624,6.372,97.9,2.3274,4,437,21.2,385.76,11.12,23
"134",0.32982,0,21.89,0,0.624,5.822,95.4,2.4699,4,437,21.2,388.69,15.03,18.4
"135",0.97617,0,21.89,0,0.624,5.757,98.4,2.346,4,437,21.2,262.76,17.31,15.6
"136",0.55778,0,21.89,0,0.624,6.335,98.2,2.1107,4,437,21.2,394.67,16.96,18.1
"137",0.32264,0,21.89,0,0.624,5.942,93.5,1.9669,4,437,21.2,378.25,16.9,17.4
"138",0.35233,0,21.89,0,0.624,6.454,98.4,1.8498,4,437,21.2,394.08,14.59,17.1
"139",0.2498,0,21.89,0,0.624,5.857,98.2,1.6686,4,437,21.2,392.04,21.32,13.3
"140",0.54452,0,21.89,0,0.624,6.151,97.9,1.6687,4,437,21.2,396.9,18.46,17.8
"141",0.2909,0,21.89,0,0.624,6.174,93.6,1.6119,4,437,21.2,388.08,24.16,14
"142",1.62864,0,21.89,0,0.624,5.019,100,1.4394,4,437,21.2,396.9,34.41,14.4
"143",3.32105,0,19.58,1,0.871,5.403,100,1.3216,5,403,14.7,396.9,26.82,13.4
"144",4.0974,0,19.58,0,0.871,5.468,100,1.4118,5,403,14.7,396.9,26.42,15.6
"145",2.77974,0,19.58,0,0.871,4.903,97.8,1.3459,5,403,14.7,396.9,29.29,11.8
"146",2.37934,0,19.58,0,0.871,6.13,100,1.4191,5,403,14.7,172.91,27.8,13.8
"147",2.15505,0,19.58,0,0.871,5.628,100,1.5166,5,403,14.7,169.27,16.65,15.6
"148",2.36862,0,19.58,0,0.871,4.926,95.7,1.4608,5,403,14.7,391.71,29.53,14.6
"149",2.33099,0,19.58,0,0.871,5.186,93.8,1.5296,5,403,14.7,356.99,28.32,17.8
"150",2.73397,0,19.58,0,0.871,5.597,94.9,1.5257,5,403,14.7,351.85,21.45,15.4
"151",1.6566,0,19.58,0,0.871,6.122,97.3,1.618,5,403,14.7,372.8,14.1,21.5
"152",1.49632,0,19.58,0,0.871,5.404,100,1.5916,5,403,14.7,341.6,13.28,19.6
"153",1.12658,0,19.58,1,0.871,5.012,88,1.6102,5,403,14.7,343.28,12.12,15.3
"154",2.14918,0,19.58,0,0.871,5.709,98.5,1.6232,5,403,14.7,261.95,15.79,19.4
"155",1.41385,0,19.58,1,0.871,6.129,96,1.7494,5,403,14.7,321.02,15.12,17
"156",3.53501,0,19.58,1,0.871,6.152,82.6,1.7455,5,403,14.7,88.01,15.02,15.6
"157",2.44668,0,19.58,0,0.871,5.272,94,1.7364,5,403,14.7,88.63,16.14,13.1
"158",1.22358,0,19.58,0,0.605,6.943,97.4,1.8773,5,403,14.7,363.43,4.59,41.3
"159",1.34284,0,19.58,0,0.605,6.066,100,1.7573,5,403,14.7,353.89,6.43,24.3
"160",1.42502,0,19.58,0,0.871,6.51,100,1.7659,5,403,14.7,364.31,7.39,23.3
"161",1.27346,0,19.58,1,0.605,6.25,92.6,1.7984,5,403,14.7,338.92,5.5,27
"162",1.46336,0,19.58,0,0.605,7.489,90.8,1.9709,5,403,14.7,374.43,1.73,50
"163",1.83377,0,19.58,1,0.605,7.802,98.2,2.0407,5,403,14.7,389.61,1.92,50
"164",1.51902,0,19.58,1,0.605,8.375,93.9,2.162,5,403,14.7,388.45,3.32,50
"165",2.24236,0,19.58,0,0.605,5.854,91.8,2.422,5,403,14.7,395.11,11.64,22.7
"166",2.924,0,19.58,0,0.605,6.101,93,2.2834,5,403,14.7,240.16,9.81,25
"167",2.01019,0,19.58,0,0.605,7.929,96.2,2.0459,5,403,14.7,369.3,3.7,50
"168",1.80028,0,19.58,0,0.605,5.877,79.2,2.4259,5,403,14.7,227.61,12.14,23.8
"169",2.3004,0,19.58,0,0.605,6.319,96.1,2.1,5,403,14.7,297.09,11.1,23.8
"170",2.44953,0,19.58,0,0.605,6.402,95.2,2.2625,5,403,14.7,330.04,11.32,22.3
"171",1.20742,0,19.58,0,0.605,5.875,94.6,2.4259,5,403,14.7,292.29,14.43,17.4
"172",2.3139,0,19.58,0,0.605,5.88,97.3,2.3887,5,403,14.7,348.13,12.03,19.1
"173",0.13914,0,4.05,0,0.51,5.572,88.5,2.5961,5,296,16.6,396.9,14.69,23.1
"174",0.09178,0,4.05,0,0.51,6.416,84.1,2.6463,5,296,16.6,395.5,9.04,23.6
"175",0.08447,0,4.05,0,0.51,5.859,68.7,2.7019,5,296,16.6,393.23,9.64,22.6
"176",0.06664,0,4.05,0,0.51,6.546,33.1,3.1323,5,296,16.6,390.96,5.33,29.4
"177",0.07022,0,4.05,0,0.51,6.02,47.2,3.5549,5,296,16.6,393.23,10.11,23.2
"178",0.05425,0,4.05,0,0.51,6.315,73.4,3.3175,5,296,16.6,395.6,6.29,24.6
"179",0.06642,0,4.05,0,0.51,6.86,74.4,2.9153,5,296,16.6,391.27,6.92,29.9
"180",0.0578,0,2.46,0,0.488,6.98,58.4,2.829,3,193,17.8,396.9,5.04,37.2
"181",0.06588,0,2.46,0,0.488,7.765,83.3,2.741,3,193,17.8,395.56,7.56,39.8
"182",0.06888,0,2.46,0,0.488,6.144,62.2,2.5979,3,193,17.8,396.9,9.45,36.2
"183",0.09103,0,2.46,0,0.488,7.155,92.2,2.7006,3,193,17.8,394.12,4.82,37.9
"184",0.10008,0,2.46,0,0.488,6.563,95.6,2.847,3,193,17.8,396.9,5.68,32.5
"185",0.08308,0,2.46,0,0.488,5.604,89.8,2.9879,3,193,17.8,391,13.98,26.4
"186",0.06047,0,2.46,0,0.488,6.153,68.8,3.2797,3,193,17.8,387.11,13.15,29.6
"187",0.05602,0,2.46,0,0.488,7.831,53.6,3.1992,3,193,17.8,392.63,4.45,50
"188",0.07875,45,3.44,0,0.437,6.782,41.1,3.7886,5,398,15.2,393.87,6.68,32
"189",0.12579,45,3.44,0,0.437,6.556,29.1,4.5667,5,398,15.2,382.84,4.56,29.8
"190",0.0837,45,3.44,0,0.437,7.185,38.9,4.5667,5,398,15.2,396.9,5.39,34.9
"191",0.09068,45,3.44,0,0.437,6.951,21.5,6.4798,5,398,15.2,377.68,5.1,37
"192",0.06911,45,3.44,0,0.437,6.739,30.8,6.4798,5,398,15.2,389.71,4.69,30.5
"193",0.08664,45,3.44,0,0.437,7.178,26.3,6.4798,5,398,15.2,390.49,2.87,36.4
"194",0.02187,60,2.93,0,0.401,6.8,9.9,6.2196,1,265,15.6,393.37,5.03,31.1
"195",0.01439,60,2.93,0,0.401,6.604,18.8,6.2196,1,265,15.6,376.7,4.38,29.1
"196",0.01381,80,0.46,0,0.422,7.875,32,5.6484,4,255,14.4,394.23,2.97,50
"197",0.04011,80,1.52,0,0.404,7.287,34.1,7.309,2,329,12.6,396.9,4.08,33.3
"198",0.04666,80,1.52,0,0.404,7.107,36.6,7.309,2,329,12.6,354.31,8.61,30.3
"199",0.03768,80,1.52,0,0.404,7.274,38.3,7.309,2,329,12.6,392.2,6.62,34.6
"200",0.0315,95,1.47,0,0.403,6.975,15.3,7.6534,3,402,17,396.9,4.56,34.9
"201",0.01778,95,1.47,0,0.403,7.135,13.9,7.6534,3,402,17,384.3,4.45,32.9
"202",0.03445,82.5,2.03,0,0.415,6.162,38.4,6.27,2,348,14.7,393.77,7.43,24.1
"203",0.02177,82.5,2.03,0,0.415,7.61,15.7,6.27,2,348,14.7,395.38,3.11,42.3
"204",0.0351,95,2.68,0,0.4161,7.853,33.2,5.118,4,224,14.7,392.78,3.81,48.5
"205",0.02009,95,2.68,0,0.4161,8.034,31.9,5.118,4,224,14.7,390.55,2.88,50
"206",0.13642,0,10.59,0,0.489,5.891,22.3,3.9454,4,277,18.6,396.9,10.87,22.6
"207",0.22969,0,10.59,0,0.489,6.326,52.5,4.3549,4,277,18.6,394.87,10.97,24.4
"208",0.25199,0,10.59,0,0.489,5.783,72.7,4.3549,4,277,18.6,389.43,18.06,22.5
"209",0.13587,0,10.59,1,0.489,6.064,59.1,4.2392,4,277,18.6,381.32,14.66,24.4
"210",0.43571,0,10.59,1,0.489,5.344,100,3.875,4,277,18.6,396.9,23.09,20
"211",0.17446,0,10.59,1,0.489,5.96,92.1,3.8771,4,277,18.6,393.25,17.27,21.7
"212",0.37578,0,10.59,1,0.489,5.404,88.6,3.665,4,277,18.6,395.24,23.98,19.3
"213",0.21719,0,10.59,1,0.489,5.807,53.8,3.6526,4,277,18.6,390.94,16.03,22.4
"214",0.14052,0,10.59,0,0.489,6.375,32.3,3.9454,4,277,18.6,385.81,9.38,28.1
"215",0.28955,0,10.59,0,0.489,5.412,9.8,3.5875,4,277,18.6,348.93,29.55,23.7
"216",0.19802,0,10.59,0,0.489,6.182,42.4,3.9454,4,277,18.6,393.63,9.47,25
"217",0.0456,0,13.89,1,0.55,5.888,56,3.1121,5,276,16.4,392.8,13.51,23.3
"218",0.07013,0,13.89,0,0.55,6.642,85.1,3.4211,5,276,16.4,392.78,9.69,28.7
"219",0.11069,0,13.89,1,0.55,5.951,93.8,2.8893,5,276,16.4,396.9,17.92,21.5
"220",0.11425,0,13.89,1,0.55,6.373,92.4,3.3633,5,276,16.4,393.74,10.5,23
"221",0.35809,0,6.2,1,0.507,6.951,88.5,2.8617,8,307,17.4,391.7,9.71,26.7
"222",0.40771,0,6.2,1,0.507,6.164,91.3,3.048,8,307,17.4,395.24,21.46,21.7
"223",0.62356,0,6.2,1,0.507,6.879,77.7,3.2721,8,307,17.4,390.39,9.93,27.5
"224",0.6147,0,6.2,0,0.507,6.618,80.8,3.2721,8,307,17.4,396.9,7.6,30.1
"225",0.31533,0,6.2,0,0.504,8.266,78.3,2.8944,8,307,17.4,385.05,4.14,44.8
"226",0.52693,0,6.2,0,0.504,8.725,83,2.8944,8,307,17.4,382,4.63,50
"227",0.38214,0,6.2,0,0.504,8.04,86.5,3.2157,8,307,17.4,387.38,3.13,37.6
"228",0.41238,0,6.2,0,0.504,7.163,79.9,3.2157,8,307,17.4,372.08,6.36,31.6
"229",0.29819,0,6.2,0,0.504,7.686,17,3.3751,8,307,17.4,377.51,3.92,46.7
"230",0.44178,0,6.2,0,0.504,6.552,21.4,3.3751,8,307,17.4,380.34,3.76,31.5
"231",0.537,0,6.2,0,0.504,5.981,68.1,3.6715,8,307,17.4,378.35,11.65,24.3
"232",0.46296,0,6.2,0,0.504,7.412,76.9,3.6715,8,307,17.4,376.14,5.25,31.7
"233",0.57529,0,6.2,0,0.507,8.337,73.3,3.8384,8,307,17.4,385.91,2.47,41.7
"234",0.33147,0,6.2,0,0.507,8.247,70.4,3.6519,8,307,17.4,378.95,3.95,48.3
"235",0.44791,0,6.2,1,0.507,6.726,66.5,3.6519,8,307,17.4,360.2,8.05,29
"236",0.33045,0,6.2,0,0.507,6.086,61.5,3.6519,8,307,17.4,376.75,10.88,24
"237",0.52058,0,6.2,1,0.507,6.631,76.5,4.148,8,307,17.4,388.45,9.54,25.1
"238",0.51183,0,6.2,0,0.507,7.358,71.6,4.148,8,307,17.4,390.07,4.73,31.5
"239",0.08244,30,4.93,0,0.428,6.481,18.5,6.1899,6,300,16.6,379.41,6.36,23.7
"240",0.09252,30,4.93,0,0.428,6.606,42.2,6.1899,6,300,16.6,383.78,7.37,23.3
"241",0.11329,30,4.93,0,0.428,6.897,54.3,6.3361,6,300,16.6,391.25,11.38,22
"242",0.10612,30,4.93,0,0.428,6.095,65.1,6.3361,6,300,16.6,394.62,12.4,20.1
"243",0.1029,30,4.93,0,0.428,6.358,52.9,7.0355,6,300,16.6,372.75,11.22,22.2
"244",0.12757,30,4.93,0,0.428,6.393,7.8,7.0355,6,300,16.6,374.71,5.19,23.7
"245",0.20608,22,5.86,0,0.431,5.593,76.5,7.9549,7,330,19.1,372.49,12.5,17.6
"246",0.19133,22,5.86,0,0.431,5.605,70.2,7.9549,7,330,19.1,389.13,18.46,18.5
"247",0.33983,22,5.86,0,0.431,6.108,34.9,8.0555,7,330,19.1,390.18,9.16,24.3
"248",0.19657,22,5.86,0,0.431,6.226,79.2,8.0555,7,330,19.1,376.14,10.15,20.5
"249",0.16439,22,5.86,0,0.431,6.433,49.1,7.8265,7,330,19.1,374.71,9.52,24.5
"250",0.19073,22,5.86,0,0.431,6.718,17.5,7.8265,7,330,19.1,393.74,6.56,26.2
"251",0.1403,22,5.86,0,0.431,6.487,13,7.3967,7,330,19.1,396.28,5.9,24.4
"252",0.21409,22,5.86,0,0.431,6.438,8.9,7.3967,7,330,19.1,377.07,3.59,24.8
"253",0.08221,22,5.86,0,0.431,6.957,6.8,8.9067,7,330,19.1,386.09,3.53,29.6
"254",0.36894,22,5.86,0,0.431,8.259,8.4,8.9067,7,330,19.1,396.9,3.54,42.8
"255",0.04819,80,3.64,0,0.392,6.108,32,9.2203,1,315,16.4,392.89,6.57,21.9
"256",0.03548,80,3.64,0,0.392,5.876,19.1,9.2203,1,315,16.4,395.18,9.25,20.9
"257",0.01538,90,3.75,0,0.394,7.454,34.2,6.3361,3,244,15.9,386.34,3.11,44
"258",0.61154,20,3.97,0,0.647,8.704,86.9,1.801,5,264,13,389.7,5.12,50
"259",0.66351,20,3.97,0,0.647,7.333,100,1.8946,5,264,13,383.29,7.79,36
"260",0.65665,20,3.97,0,0.647,6.842,100,2.0107,5,264,13,391.93,6.9,30.1
"261",0.54011,20,3.97,0,0.647,7.203,81.8,2.1121,5,264,13,392.8,9.59,33.8
"262",0.53412,20,3.97,0,0.647,7.52,89.4,2.1398,5,264,13,388.37,7.26,43.1
"263",0.52014,20,3.97,0,0.647,8.398,91.5,2.2885,5,264,13,386.86,5.91,48.8
"264",0.82526,20,3.97,0,0.647,7.327,94.5,2.0788,5,264,13,393.42,11.25,31
"265",0.55007,20,3.97,0,0.647,7.206,91.6,1.9301,5,264,13,387.89,8.1,36.5
"266",0.76162,20,3.97,0,0.647,5.56,62.8,1.9865,5,264,13,392.4,10.45,22.8
"267",0.7857,20,3.97,0,0.647,7.014,84.6,2.1329,5,264,13,384.07,14.79,30.7
"268",0.57834,20,3.97,0,0.575,8.297,67,2.4216,5,264,13,384.54,7.44,50
"269",0.5405,20,3.97,0,0.575,7.47,52.6,2.872,5,264,13,390.3,3.16,43.5
"270",0.09065,20,6.96,1,0.464,5.92,61.5,3.9175,3,223,18.6,391.34,13.65,20.7
"271",0.29916,20,6.96,0,0.464,5.856,42.1,4.429,3,223,18.6,388.65,13,21.1
"272",0.16211,20,6.96,0,0.464,6.24,16.3,4.429,3,223,18.6,396.9,6.59,25.2
"273",0.1146,20,6.96,0,0.464,6.538,58.7,3.9175,3,223,18.6,394.96,7.73,24.4
"274",0.22188,20,6.96,1,0.464,7.691,51.8,4.3665,3,223,18.6,390.77,6.58,35.2
"275",0.05644,40,6.41,1,0.447,6.758,32.9,4.0776,4,254,17.6,396.9,3.53,32.4
"276",0.09604,40,6.41,0,0.447,6.854,42.8,4.2673,4,254,17.6,396.9,2.98,32
"277",0.10469,40,6.41,1,0.447,7.267,49,4.7872,4,254,17.6,389.25,6.05,33.2
"278",0.06127,40,6.41,1,0.447,6.826,27.6,4.8628,4,254,17.6,393.45,4.16,33.1
"279",0.07978,40,6.41,0,0.447,6.482,32.1,4.1403,4,254,17.6,396.9,7.19,29.1
"280",0.21038,20,3.33,0,0.4429,6.812,32.2,4.1007,5,216,14.9,396.9,4.85,35.1
"281",0.03578,20,3.33,0,0.4429,7.82,64.5,4.6947,5,216,14.9,387.31,3.76,45.4
"282",0.03705,20,3.33,0,0.4429,6.968,37.2,5.2447,5,216,14.9,392.23,4.59,35.4
"283",0.06129,20,3.33,1,0.4429,7.645,49.7,5.2119,5,216,14.9,377.07,3.01,46
"284",0.01501,90,1.21,1,0.401,7.923,24.8,5.885,1,198,13.6,395.52,3.16,50
"285",0.00906,90,2.97,0,0.4,7.088,20.8,7.3073,1,285,15.3,394.72,7.85,32.2
"286",0.01096,55,2.25,0,0.389,6.453,31.9,7.3073,1,300,15.3,394.72,8.23,22
"287",0.01965,80,1.76,0,0.385,6.23,31.5,9.0892,1,241,18.2,341.6,12.93,20.1
"288",0.03871,52.5,5.32,0,0.405,6.209,31.3,7.3172,6,293,16.6,396.9,7.14,23.2
"289",0.0459,52.5,5.32,0,0.405,6.315,45.6,7.3172,6,293,16.6,396.9,7.6,22.3
"290",0.04297,52.5,5.32,0,0.405,6.565,22.9,7.3172,6,293,16.6,371.72,9.51,24.8
"291",0.03502,80,4.95,0,0.411,6.861,27.9,5.1167,4,245,19.2,396.9,3.33,28.5
"292",0.07886,80,4.95,0,0.411,7.148,27.7,5.1167,4,245,19.2,396.9,3.56,37.3
"293",0.03615,80,4.95,0,0.411,6.63,23.4,5.1167,4,245,19.2,396.9,4.7,27.9
"294",0.08265,0,13.92,0,0.437,6.127,18.4,5.5027,4,289,16,396.9,8.58,23.9
"295",0.08199,0,13.92,0,0.437,6.009,42.3,5.5027,4,289,16,396.9,10.4,21.7
"296",0.12932,0,13.92,0,0.437,6.678,31.1,5.9604,4,289,16,396.9,6.27,28.6
"297",0.05372,0,13.92,0,0.437,6.549,51,5.9604,4,289,16,392.85,7.39,27.1
"298",0.14103,0,13.92,0,0.437,5.79,58,6.32,4,289,16,396.9,15.84,20.3
"299",0.06466,70,2.24,0,0.4,6.345,20.1,7.8278,5,358,14.8,368.24,4.97,22.5
"300",0.05561,70,2.24,0,0.4,7.041,10,7.8278,5,358,14.8,371.58,4.74,29
"301",0.04417,70,2.24,0,0.4,6.871,47.4,7.8278,5,358,14.8,390.86,6.07,24.8
"302",0.03537,34,6.09,0,0.433,6.59,40.4,5.4917,7,329,16.1,395.75,9.5,22
"303",0.09266,34,6.09,0,0.433,6.495,18.4,5.4917,7,329,16.1,383.61,8.67,26.4
"304",0.1,34,6.09,0,0.433,6.982,17.7,5.4917,7,329,16.1,390.43,4.86,33.1
"305",0.05515,33,2.18,0,0.472,7.236,41.1,4.022,7,222,18.4,393.68,6.93,36.1
"306",0.05479,33,2.18,0,0.472,6.616,58.1,3.37,7,222,18.4,393.36,8.93,28.4
"307",0.07503,33,2.18,0,0.472,7.42,71.9,3.0992,7,222,18.4,396.9,6.47,33.4
"308",0.04932,33,2.18,0,0.472,6.849,70.3,3.1827,7,222,18.4,396.9,7.53,28.2
"309",0.49298,0,9.9,0,0.544,6.635,82.5,3.3175,4,304,18.4,396.9,4.54,22.8
"310",0.3494,0,9.9,0,0.544,5.972,76.7,3.1025,4,304,18.4,396.24,9.97,20.3
"311",2.63548,0,9.9,0,0.544,4.973,37.8,2.5194,4,304,18.4,350.45,12.64,16.1
"312",0.79041,0,9.9,0,0.544,6.122,52.8,2.6403,4,304,18.4,396.9,5.98,22.1
"313",0.26169,0,9.9,0,0.544,6.023,90.4,2.834,4,304,18.4,396.3,11.72,19.4
"314",0.26938,0,9.9,0,0.544,6.266,82.8,3.2628,4,304,18.4,393.39,7.9,21.6
"315",0.3692,0,9.9,0,0.544,6.567,87.3,3.6023,4,304,18.4,395.69,9.28,23.8
"316",0.25356,0,9.9,0,0.544,5.705,77.7,3.945,4,304,18.4,396.42,11.5,16.2
"317",0.31827,0,9.9,0,0.544,5.914,83.2,3.9986,4,304,18.4,390.7,18.33,17.8
"318",0.24522,0,9.9,0,0.544,5.782,71.7,4.0317,4,304,18.4,396.9,15.94,19.8
"319",0.40202,0,9.9,0,0.544,6.382,67.2,3.5325,4,304,18.4,395.21,10.36,23.1
"320",0.47547,0,9.9,0,0.544,6.113,58.8,4.0019,4,304,18.4,396.23,12.73,21
"321",0.1676,0,7.38,0,0.493,6.426,52.3,4.5404,5,287,19.6,396.9,7.2,23.8
"322",0.18159,0,7.38,0,0.493,6.376,54.3,4.5404,5,287,19.6,396.9,6.87,23.1
"323",0.35114,0,7.38,0,0.493,6.041,49.9,4.7211,5,287,19.6,396.9,7.7,20.4
"324",0.28392,0,7.38,0,0.493,5.708,74.3,4.7211,5,287,19.6,391.13,11.74,18.5
"325",0.34109,0,7.38,0,0.493,6.415,40.1,4.7211,5,287,19.6,396.9,6.12,25
"326",0.19186,0,7.38,0,0.493,6.431,14.7,5.4159,5,287,19.6,393.68,5.08,24.6
"327",0.30347,0,7.38,0,0.493,6.312,28.9,5.4159,5,287,19.6,396.9,6.15,23
"328",0.24103,0,7.38,0,0.493,6.083,43.7,5.4159,5,287,19.6,396.9,12.79,22.2
"329",0.06617,0,3.24,0,0.46,5.868,25.8,5.2146,4,430,16.9,382.44,9.97,19.3
"330",0.06724,0,3.24,0,0.46,6.333,17.2,5.2146,4,430,16.9,375.21,7.34,22.6
"331",0.04544,0,3.24,0,0.46,6.144,32.2,5.8736,4,430,16.9,368.57,9.09,19.8
"332",0.05023,35,6.06,0,0.4379,5.706,28.4,6.6407,1,304,16.9,394.02,12.43,17.1
"333",0.03466,35,6.06,0,0.4379,6.031,23.3,6.6407,1,304,16.9,362.25,7.83,19.4
"334",0.05083,0,5.19,0,0.515,6.316,38.1,6.4584,5,224,20.2,389.71,5.68,22.2
"335",0.03738,0,5.19,0,0.515,6.31,38.5,6.4584,5,224,20.2,389.4,6.75,20.7
"336",0.03961,0,5.19,0,0.515,6.037,34.5,5.9853,5,224,20.2,396.9,8.01,21.1
"337",0.03427,0,5.19,0,0.515,5.869,46.3,5.2311,5,224,20.2,396.9,9.8,19.5
"338",0.03041,0,5.19,0,0.515,5.895,59.6,5.615,5,224,20.2,394.81,10.56,18.5
"339",0.03306,0,5.19,0,0.515,6.059,37.3,4.8122,5,224,20.2,396.14,8.51,20.6
"340",0.05497,0,5.19,0,0.515,5.985,45.4,4.8122,5,224,20.2,396.9,9.74,19
"341",0.06151,0,5.19,0,0.515,5.968,58.5,4.8122,5,224,20.2,396.9,9.29,18.7
"342",0.01301,35,1.52,0,0.442,7.241,49.3,7.0379,1,284,15.5,394.74,5.49,32.7
"343",0.02498,0,1.89,0,0.518,6.54,59.7,6.2669,1,422,15.9,389.96,8.65,16.5
"344",0.02543,55,3.78,0,0.484,6.696,56.4,5.7321,5,370,17.6,396.9,7.18,23.9
"345",0.03049,55,3.78,0,0.484,6.874,28.1,6.4654,5,370,17.6,387.97,4.61,31.2
"346",0.03113,0,4.39,0,0.442,6.014,48.5,8.0136,3,352,18.8,385.64,10.53,17.5
"347",0.06162,0,4.39,0,0.442,5.898,52.3,8.0136,3,352,18.8,364.61,12.67,17.2
"348",0.0187,85,4.15,0,0.429,6.516,27.7,8.5353,4,351,17.9,392.43,6.36,23.1
"349",0.01501,80,2.01,0,0.435,6.635,29.7,8.344,4,280,17,390.94,5.99,24.5
"350",0.02899,40,1.25,0,0.429,6.939,34.5,8.7921,1,335,19.7,389.85,5.89,26.6
"351",0.06211,40,1.25,0,0.429,6.49,44.4,8.7921,1,335,19.7,396.9,5.98,22.9
"352",0.0795,60,1.69,0,0.411,6.579,35.9,10.7103,4,411,18.3,370.78,5.49,24.1
"353",0.07244,60,1.69,0,0.411,5.884,18.5,10.7103,4,411,18.3,392.33,7.79,18.6
"354",0.01709,90,2.02,0,0.41,6.728,36.1,12.1265,5,187,17,384.46,4.5,30.1
"355",0.04301,80,1.91,0,0.413,5.663,21.9,10.5857,4,334,22,382.8,8.05,18.2
"356",0.10659,80,1.91,0,0.413,5.936,19.5,10.5857,4,334,22,376.04,5.57,20.6
"357",8.98296,0,18.1,1,0.77,6.212,97.4,2.1222,24,666,20.2,377.73,17.6,17.8
"358",3.8497,0,18.1,1,0.77,6.395,91,2.5052,24,666,20.2,391.34,13.27,21.7
"359",5.20177,0,18.1,1,0.77,6.127,83.4,2.7227,24,666,20.2,395.43,11.48,22.7
"360",4.26131,0,18.1,0,0.77,6.112,81.3,2.5091,24,666,20.2,390.74,12.67,22.6
"361",4.54192,0,18.1,0,0.77,6.398,88,2.5182,24,666,20.2,374.56,7.79,25
"362",3.83684,0,18.1,0,0.77,6.251,91.1,2.2955,24,666,20.2,350.65,14.19,19.9
"363",3.67822,0,18.1,0,0.77,5.362,96.2,2.1036,24,666,20.2,380.79,10.19,20.8
"364",4.22239,0,18.1,1,0.77,5.803,89,1.9047,24,666,20.2,353.04,14.64,16.8
"365",3.47428,0,18.1,1,0.718,8.78,82.9,1.9047,24,666,20.2,354.55,5.29,21.9
"366",4.55587,0,18.1,0,0.718,3.561,87.9,1.6132,24,666,20.2,354.7,7.12,27.5
"367",3.69695,0,18.1,0,0.718,4.963,91.4,1.7523,24,666,20.2,316.03,14,21.9
"368",13.5222,0,18.1,0,0.631,3.863,100,1.5106,24,666,20.2,131.42,13.33,23.1
"369",4.89822,0,18.1,0,0.631,4.97,100,1.3325,24,666,20.2,375.52,3.26,50
"370",5.66998,0,18.1,1,0.631,6.683,96.8,1.3567,24,666,20.2,375.33,3.73,50
"371",6.53876,0,18.1,1,0.631,7.016,97.5,1.2024,24,666,20.2,392.05,2.96,50
"372",9.2323,0,18.1,0,0.631,6.216,100,1.1691,24,666,20.2,366.15,9.53,50
"373",8.26725,0,18.1,1,0.668,5.875,89.6,1.1296,24,666,20.2,347.88,8.88,50
"374",11.1081,0,18.1,0,0.668,4.906,100,1.1742,24,666,20.2,396.9,34.77,13.8
"375",18.4982,0,18.1,0,0.668,4.138,100,1.137,24,666,20.2,396.9,37.97,13.8
"376",19.6091,0,18.1,0,0.671,7.313,97.9,1.3163,24,666,20.2,396.9,13.44,15
"377",15.288,0,18.1,0,0.671,6.649,93.3,1.3449,24,666,20.2,363.02,23.24,13.9
"378",9.82349,0,18.1,0,0.671,6.794,98.8,1.358,24,666,20.2,396.9,21.24,13.3
"379",23.6482,0,18.1,0,0.671,6.38,96.2,1.3861,24,666,20.2,396.9,23.69,13.1
"380",17.8667,0,18.1,0,0.671,6.223,100,1.3861,24,666,20.2,393.74,21.78,10.2
"381",88.9762,0,18.1,0,0.671,6.968,91.9,1.4165,24,666,20.2,396.9,17.21,10.4
"382",15.8744,0,18.1,0,0.671,6.545,99.1,1.5192,24,666,20.2,396.9,21.08,10.9
"383",9.18702,0,18.1,0,0.7,5.536,100,1.5804,24,666,20.2,396.9,23.6,11.3
"384",7.99248,0,18.1,0,0.7,5.52,100,1.5331,24,666,20.2,396.9,24.56,12.3
"385",20.0849,0,18.1,0,0.7,4.368,91.2,1.4395,24,666,20.2,285.83,30.63,8.8
"386",16.8118,0,18.1,0,0.7,5.277,98.1,1.4261,24,666,20.2,396.9,30.81,7.2
"387",24.3938,0,18.1,0,0.7,4.652,100,1.4672,24,666,20.2,396.9,28.28,10.5
"388",22.5971,0,18.1,0,0.7,5,89.5,1.5184,24,666,20.2,396.9,31.99,7.4
"389",14.3337,0,18.1,0,0.7,4.88,100,1.5895,24,666,20.2,372.92,30.62,10.2
"390",8.15174,0,18.1,0,0.7,5.39,98.9,1.7281,24,666,20.2,396.9,20.85,11.5
"391",6.96215,0,18.1,0,0.7,5.713,97,1.9265,24,666,20.2,394.43,17.11,15.1
"392",5.29305,0,18.1,0,0.7,6.051,82.5,2.1678,24,666,20.2,378.38,18.76,23.2
"393",11.5779,0,18.1,0,0.7,5.036,97,1.77,24,666,20.2,396.9,25.68,9.7
"394",8.64476,0,18.1,0,0.693,6.193,92.6,1.7912,24,666,20.2,396.9,15.17,13.8
"395",13.3598,0,18.1,0,0.693,5.887,94.7,1.7821,24,666,20.2,396.9,16.35,12.7
"396",8.71675,0,18.1,0,0.693,6.471,98.8,1.7257,24,666,20.2,391.98,17.12,13.1
"397",5.87205,0,18.1,0,0.693,6.405,96,1.6768,24,666,20.2,396.9,19.37,12.5
"398",7.67202,0,18.1,0,0.693,5.747,98.9,1.6334,24,666,20.2,393.1,19.92,8.5
"399",38.3518,0,18.1,0,0.693,5.453,100,1.4896,24,666,20.2,396.9,30.59,5
"400",9.91655,0,18.1,0,0.693,5.852,77.8,1.5004,24,666,20.2,338.16,29.97,6.3
"401",25.0461,0,18.1,0,0.693,5.987,100,1.5888,24,666,20.2,396.9,26.77,5.6
"402",14.2362,0,18.1,0,0.693,6.343,100,1.5741,24,666,20.2,396.9,20.32,7.2
"403",9.59571,0,18.1,0,0.693,6.404,100,1.639,24,666,20.2,376.11,20.31,12.1
"404",24.8017,0,18.1,0,0.693,5.349,96,1.7028,24,666,20.2,396.9,19.77,8.3
"405",41.5292,0,18.1,0,0.693,5.531,85.4,1.6074,24,666,20.2,329.46,27.38,8.5
"406",67.9208,0,18.1,0,0.693,5.683,100,1.4254,24,666,20.2,384.97,22.98,5
"407",20.7162,0,18.1,0,0.659,4.138,100,1.1781,24,666,20.2,370.22,23.34,11.9
"408",11.9511,0,18.1,0,0.659,5.608,100,1.2852,24,666,20.2,332.09,12.13,27.9
"409",7.40389,0,18.1,0,0.597,5.617,97.9,1.4547,24,666,20.2,314.64,26.4,17.2
"410",14.4383,0,18.1,0,0.597,6.852,100,1.4655,24,666,20.2,179.36,19.78,27.5
"411",51.1358,0,18.1,0,0.597,5.757,100,1.413,24,666,20.2,2.6,10.11,15
"412",14.0507,0,18.1,0,0.597,6.657,100,1.5275,24,666,20.2,35.05,21.22,17.2
"413",18.811,0,18.1,0,0.597,4.628,100,1.5539,24,666,20.2,28.79,34.37,17.9
"414",28.6558,0,18.1,0,0.597,5.155,100,1.5894,24,666,20.2,210.97,20.08,16.3
"415",45.7461,0,18.1,0,0.693,4.519,100,1.6582,24,666,20.2,88.27,36.98,7
"416",18.0846,0,18.1,0,0.679,6.434,100,1.8347,24,666,20.2,27.25,29.05,7.2
"417",10.8342,0,18.1,0,0.679,6.782,90.8,1.8195,24,666,20.2,21.57,25.79,7.5
"418",25.9406,0,18.1,0,0.679,5.304,89.1,1.6475,24,666,20.2,127.36,26.64,10.4
"419",73.5341,0,18.1,0,0.679,5.957,100,1.8026,24,666,20.2,16.45,20.62,8.8
"420",11.8123,0,18.1,0,0.718,6.824,76.5,1.794,24,666,20.2,48.45,22.74,8.4
"421",11.0874,0,18.1,0,0.718,6.411,100,1.8589,24,666,20.2,318.75,15.02,16.7
"422",7.02259,0,18.1,0,0.718,6.006,95.3,1.8746,24,666,20.2,319.98,15.7,14.2
"423",12.0482,0,18.1,0,0.614,5.648,87.6,1.9512,24,666,20.2,291.55,14.1,20.8
"424",7.05042,0,18.1,0,0.614,6.103,85.1,2.0218,24,666,20.2,2.52,23.29,13.4
"425",8.79212,0,18.1,0,0.584,5.565,70.6,2.0635,24,666,20.2,3.65,17.16,11.7
"426",15.8603,0,18.1,0,0.679,5.896,95.4,1.9096,24,666,20.2,7.68,24.39,8.3
"427",12.2472,0,18.1,0,0.584,5.837,59.7,1.9976,24,666,20.2,24.65,15.69,10.2
"428",37.6619,0,18.1,0,0.679,6.202,78.7,1.8629,24,666,20.2,18.82,14.52,10.9
"429",7.36711,0,18.1,0,0.679,6.193,78.1,1.9356,24,666,20.2,96.73,21.52,11
"430",9.33889,0,18.1,0,0.679,6.38,95.6,1.9682,24,666,20.2,60.72,24.08,9.5
"431",8.49213,0,18.1,0,0.584,6.348,86.1,2.0527,24,666,20.2,83.45,17.64,14.5
"432",10.0623,0,18.1,0,0.584,6.833,94.3,2.0882,24,666,20.2,81.33,19.69,14.1
"433",6.44405,0,18.1,0,0.584,6.425,74.8,2.2004,24,666,20.2,97.95,12.03,16.1
"434",5.58107,0,18.1,0,0.713,6.436,87.9,2.3158,24,666,20.2,100.19,16.22,14.3
"435",13.9134,0,18.1,0,0.713,6.208,95,2.2222,24,666,20.2,100.63,15.17,11.7
"436",11.1604,0,18.1,0,0.74,6.629,94.6,2.1247,24,666,20.2,109.85,23.27,13.4
"437",14.4208,0,18.1,0,0.74,6.461,93.3,2.0026,24,666,20.2,27.49,18.05,9.6
"438",15.1772,0,18.1,0,0.74,6.152,100,1.9142,24,666,20.2,9.32,26.45,8.7
"439",13.6781,0,18.1,0,0.74,5.935,87.9,1.8206,24,666,20.2,68.95,34.02,8.4
"440",9.39063,0,18.1,0,0.74,5.627,93.9,1.8172,24,666,20.2,396.9,22.88,12.8
"441",22.0511,0,18.1,0,0.74,5.818,92.4,1.8662,24,666,20.2,391.45,22.11,10.5
"442",9.72418,0,18.1,0,0.74,6.406,97.2,2.0651,24,666,20.2,385.96,19.52,17.1
"443",5.66637,0,18.1,0,0.74,6.219,100,2.0048,24,666,20.2,395.69,16.59,18.4
"444",9.96654,0,18.1,0,0.74,6.485,100,1.9784,24,666,20.2,386.73,18.85,15.4
"445",12.8023,0,18.1,0,0.74,5.854,96.6,1.8956,24,666,20.2,240.52,23.79,10.8
"446",10.6718,0,18.1,0,0.74,6.459,94.8,1.9879,24,666,20.2,43.06,23.98,11.8
"447",6.28807,0,18.1,0,0.74,6.341,96.4,2.072,24,666,20.2,318.01,17.79,14.9
"448",9.92485,0,18.1,0,0.74,6.251,96.6,2.198,24,666,20.2,388.52,16.44,12.6
"449",9.32909,0,18.1,0,0.713,6.185,98.7,2.2616,24,666,20.2,396.9,18.13,14.1
"450",7.52601,0,18.1,0,0.713,6.417,98.3,2.185,24,666,20.2,304.21,19.31,13
"451",6.71772,0,18.1,0,0.713,6.749,92.6,2.3236,24,666,20.2,0.32,17.44,13.4
"452",5.44114,0,18.1,0,0.713,6.655,98.2,2.3552,24,666,20.2,355.29,17.73,15.2
"453",5.09017,0,18.1,0,0.713,6.297,91.8,2.3682,24,666,20.2,385.09,17.27,16.1
"454",8.24809,0,18.1,0,0.713,7.393,99.3,2.4527,24,666,20.2,375.87,16.74,17.8
"455",9.51363,0,18.1,0,0.713,6.728,94.1,2.4961,24,666,20.2,6.68,18.71,14.9
"456",4.75237,0,18.1,0,0.713,6.525,86.5,2.4358,24,666,20.2,50.92,18.13,14.1
"457",4.66883,0,18.1,0,0.713,5.976,87.9,2.5806,24,666,20.2,10.48,19.01,12.7
"458",8.20058,0,18.1,0,0.713,5.936,80.3,2.7792,24,666,20.2,3.5,16.94,13.5
"459",7.75223,0,18.1,0,0.713,6.301,83.7,2.7831,24,666,20.2,272.21,16.23,14.9
"460",6.80117,0,18.1,0,0.713,6.081,84.4,2.7175,24,666,20.2,396.9,14.7,20
"461",4.81213,0,18.1,0,0.713,6.701,90,2.5975,24,666,20.2,255.23,16.42,16.4
"462",3.69311,0,18.1,0,0.713,6.376,88.4,2.5671,24,666,20.2,391.43,14.65,17.7
"463",6.65492,0,18.1,0,0.713,6.317,83,2.7344,24,666,20.2,396.9,13.99,19.5
"464",5.82115,0,18.1,0,0.713,6.513,89.9,2.8016,24,666,20.2,393.82,10.29,20.2
"465",7.83932,0,18.1,0,0.655,6.209,65.4,2.9634,24,666,20.2,396.9,13.22,21.4
"466",3.1636,0,18.1,0,0.655,5.759,48.2,3.0665,24,666,20.2,334.4,14.13,19.9
"467",3.77498,0,18.1,0,0.655,5.952,84.7,2.8715,24,666,20.2,22.01,17.15,19
"468",4.42228,0,18.1,0,0.584,6.003,94.5,2.5403,24,666,20.2,331.29,21.32,19.1
"469",15.5757,0,18.1,0,0.58,5.926,71,2.9084,24,666,20.2,368.74,18.13,19.1
"470",13.0751,0,18.1,0,0.58,5.713,56.7,2.8237,24,666,20.2,396.9,14.76,20.1
"471",4.34879,0,18.1,0,0.58,6.167,84,3.0334,24,666,20.2,396.9,16.29,19.9
"472",4.03841,0,18.1,0,0.532,6.229,90.7,3.0993,24,666,20.2,395.33,12.87,19.6
"473",3.56868,0,18.1,0,0.58,6.437,75,2.8965,24,666,20.2,393.37,14.36,23.2
"474",4.64689,0,18.1,0,0.614,6.98,67.6,2.5329,24,666,20.2,374.68,11.66,29.8
"475",8.05579,0,18.1,0,0.584,5.427,95.4,2.4298,24,666,20.2,352.58,18.14,13.8
"476",6.39312,0,18.1,0,0.584,6.162,97.4,2.206,24,666,20.2,302.76,24.1,13.3
"477",4.87141,0,18.1,0,0.614,6.484,93.6,2.3053,24,666,20.2,396.21,18.68,16.7
"478",15.0234,0,18.1,0,0.614,5.304,97.3,2.1007,24,666,20.2,349.48,24.91,12
"479",10.233,0,18.1,0,0.614,6.185,96.7,2.1705,24,666,20.2,379.7,18.03,14.6
"480",14.3337,0,18.1,0,0.614,6.229,88,1.9512,24,666,20.2,383.32,13.11,21.4
"481",5.82401,0,18.1,0,0.532,6.242,64.7,3.4242,24,666,20.2,396.9,10.74,23
"482",5.70818,0,18.1,0,0.532,6.75,74.9,3.3317,24,666,20.2,393.07,7.74,23.7
"483",5.73116,0,18.1,0,0.532,7.061,77,3.4106,24,666,20.2,395.28,7.01,25
"484",2.81838,0,18.1,0,0.532,5.762,40.3,4.0983,24,666,20.2,392.92,10.42,21.8
"485",2.37857,0,18.1,0,0.583,5.871,41.9,3.724,24,666,20.2,370.73,13.34,20.6
"486",3.67367,0,18.1,0,0.583,6.312,51.9,3.9917,24,666,20.2,388.62,10.58,21.2
"487",5.69175,0,18.1,0,0.583,6.114,79.8,3.5459,24,666,20.2,392.68,14.98,19.1
"488",4.83567,0,18.1,0,0.583,5.905,53.2,3.1523,24,666,20.2,388.22,11.45,20.6
"489",0.15086,0,27.74,0,0.609,5.454,92.7,1.8209,4,711,20.1,395.09,18.06,15.2
"490",0.18337,0,27.74,0,0.609,5.414,98.3,1.7554,4,711,20.1,344.05,23.97,7
"491",0.20746,0,27.74,0,0.609,5.093,98,1.8226,4,711,20.1,318.43,29.68,8.1
"492",0.10574,0,27.74,0,0.609,5.983,98.8,1.8681,4,711,20.1,390.11,18.07,13.6
"493",0.11132,0,27.74,0,0.609,5.983,83.5,2.1099,4,711,20.1,396.9,13.35,20.1
"494",0.17331,0,9.69,0,0.585,5.707,54,2.3817,6,391,19.2,396.9,12.01,21.8
"495",0.27957,0,9.69,0,0.585,5.926,42.6,2.3817,6,391,19.2,396.9,13.59,24.5
"496",0.17899,0,9.69,0,0.585,5.67,28.8,2.7986,6,391,19.2,393.29,17.6,23.1
"497",0.2896,0,9.69,0,0.585,5.39,72.9,2.7986,6,391,19.2,396.9,21.14,19.7
"498",0.26838,0,9.69,0,0.585,5.794,70.6,2.8927,6,391,19.2,396.9,14.1,18.3
"499",0.23912,0,9.69,0,0.585,6.019,65.3,2.4091,6,391,19.2,396.9,12.92,21.2
"500",0.17783,0,9.69,0,0.585,5.569,73.5,2.3999,6,391,19.2,395.77,15.1,17.5
"501",0.22438,0,9.69,0,0.585,6.027,79.7,2.4982,6,391,19.2,396.9,14.33,16.8
"502",0.06263,0,11.93,0,0.573,6.593,69.1,2.4786,1,273,21,391.99,9.67,22.4
"503",0.04527,0,11.93,0,0.573,6.12,76.7,2.2875,1,273,21,396.9,9.08,20.6
"504",0.06076,0,11.93,0,0.573,6.976,91,2.1675,1,273,21,396.9,5.64,23.9
"505",0.10959,0,11.93,0,0.573,6.794,89.3,2.3889,1,273,21,393.45,6.48,22
"506",0.04741,0,11.93,0,0.573,6.03,80.8,2.505,1,273,21,396.9,7.88,11.9

View File

@ -0,0 +1,46 @@
"""
Column Meaning
crim Per capita crime rate by town
zn Proportion of residential land zoned for lots over 25,000 sq.ft.
indus Proportion of non-retail business acres per town
chas Charles River dummy variable (1 if tract bounds river, 0 otherwise)
nox Nitric oxides concentration (parts per 10 million)
rm Average number of rooms per dwelling
age Proportion of owner-occupied units built prior to 1940
dis Weighted distances to five Boston employment centers
rad Index of accessibility to radial highways
tax Full-value property-tax rate per $10,000
ptratio Pupil-teacher ratio by town
black 1000(Bk - 0.63)^2, where Bk is the proportion of Black people by town
lstat % lower status of the population
medv Median value of owner-occupied homes in $1000s (target variable)
"""
import pandas as pd
import torch

df = pd.read_csv("./RegressionModels/BostonHousing/Boston.csv")
# Column 0 of the CSV is just the exported row index; skip it so the model
# sees only the 13 predictor columns (crim .. lstat), not the row ID or medv.
X = torch.tensor(df.iloc[:, 1:14].values, dtype=torch.float32)
# Make the target a column vector so its shape matches the (N, 1) model output;
# otherwise MSELoss silently broadcasts (N,) against (N, 1).
Y = torch.tensor(df["medv"].values, dtype=torch.float32).unsqueeze(1)

model = torch.nn.Sequential(
    torch.nn.Linear(13, 1)
)
loss_fn = torch.nn.MSELoss()
# The features are unscaled (tax runs into the hundreds), so the learning
# rate has to stay tiny or the loss diverges.
optimizer = torch.optim.SGD(model.parameters(), lr=5e-9)

for epoch in range(2000):
    predict_y = model(X)
    loss = loss_fn(predict_y, Y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    if epoch % 100 == 0:
        print(f'Epoch: {epoch}, Loss: {loss.item():.2f}')
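
The tiny 5e-9 learning rate above is forced by the raw feature scales. A minimal sketch (not part of the commit; it assumes scikit-learn is installed, as the California script below already does): standardize the inputs first and an ordinary learning rate trains stably.

# Sketch: same linear model, but with standardized features and a normal lr.
import pandas as pd
import torch
from sklearn.preprocessing import StandardScaler

df = pd.read_csv("./RegressionModels/BostonHousing/Boston.csv")
scaler = StandardScaler()
X = torch.tensor(scaler.fit_transform(df.iloc[:, 1:14].values), dtype=torch.float32)
Y = torch.tensor(df["medv"].values, dtype=torch.float32).unsqueeze(1)

model = torch.nn.Sequential(torch.nn.Linear(13, 1))
loss_fn = torch.nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)  # illustrative value

for epoch in range(2000):
    loss = loss_fn(model(X), Y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()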

View File

@ -0,0 +1,47 @@
import pandas as pd
import torch
from sklearn.preprocessing import StandardScaler

df = pd.read_csv("./RegressionModels/CaliforniaHousing/housing.csv")
# Drop rows with missing values in the numeric columns (total_bedrooms has gaps)
df = df.dropna(subset=df.columns[:8])
# Encode the text column as integer category codes so the model can consume it
df['ocean_proximity_encoded'] = df['ocean_proximity'].astype('category').cat.codes

# Standardize the 8 numeric features plus the encoded category (9 inputs total)
scaler_x = StandardScaler()
scaled_X = scaler_x.fit_transform(df.iloc[:, 0:8].join(df["ocean_proximity_encoded"]).values)
X = torch.tensor(scaled_X, dtype=torch.float32)
# Standardize the target too; reshape it to a column vector for the scaler
scaler_y = StandardScaler()
scaled_Y = scaler_y.fit_transform(df["median_house_value"].values.reshape(-1, 1))
Y = torch.tensor(scaled_Y, dtype=torch.float32)

model = torch.nn.Sequential(
    torch.nn.Linear(9, 18),
    torch.nn.ReLU(),
    torch.nn.Linear(18, 1)
)
loss_fn = torch.nn.MSELoss()

# Move the model and data to the GPU if one is available, then create the
# optimizer so it references the parameters in their final location
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = model.to(device)
X = X.to(device)
Y = Y.to(device)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)

for epoch in range(3000):
    pred_y = model(X)
    loss = loss_fn(pred_y, Y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    if epoch % 100 == 0:
        print(f'Epoch: {epoch}, loss: {loss.item():.2f}')
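
Because the target was standardized, the model's outputs are in z-score units, not dollars. A hypothetical follow-up (using the names defined above): undo the scaling with scaler_y.inverse_transform to read predictions as house prices.

# Hypothetical follow-up: map standardized predictions back to dollar values
with torch.no_grad():
    pred = model(X).cpu().numpy()           # predictions in z-score units
dollars = scaler_y.inverse_transform(pred)  # invert the target scaling
print(dollars[:5])                          # first few predicted prices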

File diff suppressed because it is too large

View File

@ -0,0 +1,31 @@
,YearsExperience,Salary
0,1.2000000000000002,39344.0
1,1.4000000000000001,46206.0
2,1.6,37732.0
3,2.1,43526.0
4,2.3000000000000003,39892.0
5,3.0,56643.0
6,3.1,60151.0
7,3.3000000000000003,54446.0
8,3.3000000000000003,64446.0
9,3.8000000000000003,57190.0
10,4.0,63219.0
11,4.1,55795.0
12,4.1,56958.0
13,4.199999999999999,57082.0
14,4.6,61112.0
15,5.0,67939.0
16,5.199999999999999,66030.0
17,5.3999999999999995,83089.0
18,6.0,81364.0
19,6.1,93941.0
20,6.8999999999999995,91739.0
21,7.199999999999999,98274.0
22,8.0,101303.0
23,8.299999999999999,113813.0
24,8.799999999999999,109432.0
25,9.1,105583.0
26,9.6,116970.0
27,9.7,112636.0
28,10.4,122392.0
29,10.6,121873.0

View File

@ -0,0 +1,34 @@
import torch
import pandas as pd
import matplotlib.pyplot as plt
file = './RegressionModels/PredictSallary/Salary_dataset.csv'
df = pd.read_csv(file)
X = torch.tensor(df["YearsExperience"].values, dtype=torch.float32).unsqueeze(1)
y = torch.tensor(df['Salary'].values, dtype=torch.float32).unsqueeze(1)
model = torch.nn.Sequential(
torch.nn.Linear(1,1)
)
loss_fn = torch.nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=1e-4)
# Training loop
for epoch in range(1000):
y_pred = model(X)
loss = loss_fn(y_pred, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if epoch % 100 == 99:
print(f'Epoch {epoch+1}, Loss: {loss.item():.2f}')
# Plot the data and the fitted line
plt.scatter(X.numpy(), y.numpy(), label='Actual data')
plt.plot(X.numpy(), model(X).detach().numpy(), color='red', label='Model prediction')
plt.xlabel('Years of Experience')
plt.ylabel('Salary')
plt.legend()
plt.show()
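
Since the whole model is one Linear(1, 1) layer, the fit is just a line; a short sketch (run after the training above) reads the learned slope and intercept directly from the layer's parameters.

# Sketch: extract the fitted line y = w*x + b from the trained layer
w = model[0].weight.item()  # slope: salary gained per extra year of experience
b = model[0].bias.item()    # intercept: predicted salary at zero experience
print(f'salary ~ {w:.1f} * years + {b:.1f}')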