Commit 2ae7f344 authored by Noric Couderc's avatar Noric Couderc
Browse files

Put together first example of using PyTorch

parent 3f8eba18
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Object detection for Autonomous Systems 1\n",
"\n",
"This notebook is for experiments with PyTorch and MobileNet.\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Checking PyTorch works\n",
"\n",
"The following commands are for checking that PyTorch works and if it has access to CUDA (it's okay if it doesn't, computations will just be slower)."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"tensor([[0.1515, 0.3917, 0.5072],\n",
" [0.3883, 0.5699, 0.2542],\n",
" [0.5839, 0.0770, 0.5118],\n",
" [0.4036, 0.8600, 0.9584],\n",
" [0.3664, 0.3906, 0.1743]])\n"
]
}
],
"source": [
"# Sanity check: build a random tensor and print it.\n",
"# (Dropped `from __future__ import print_function` — PyTorch requires Python 3,\n",
"# where print is already a function.)\n",
"import torch\n",
"x = torch.rand(5, 3)\n",
"print(x)\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"False"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import torch\n",
"torch.cuda.is_available()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Loading MobileNet v2\n",
"\n",
"The commands in the following section are borrowed from [this page](https://pytorch.org/hub/pytorch_vision_mobilenet_v2/)."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Downloading: \"https://github.com/pytorch/vision/archive/v0.5.0.zip\" to /home/noric/.cache/torch/hub/v0.5.0.zip\n",
"Downloading: \"https://download.pytorch.org/models/mobilenet_v2-b0353104.pth\" to /home/noric/.cache/torch/checkpoints/mobilenet_v2-b0353104.pth\n",
"100.0%\n"
]
},
{
"data": {
"text/plain": [
"MobileNetV2(\n",
" (features): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(3, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
" (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=32, bias=False)\n",
" (1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): Conv2d(32, 16, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (2): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (2): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(16, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(96, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=96, bias=False)\n",
" (1): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(96, 24, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (3): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(144, 144, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=144, bias=False)\n",
" (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(144, 24, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(24, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (4): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(24, 144, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(144, 144, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=144, bias=False)\n",
" (1): BatchNorm2d(144, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(144, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (5): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (6): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=192, bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (7): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(32, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(192, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=192, bias=False)\n",
" (1): BatchNorm2d(192, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (8): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (9): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (10): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(384, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (11): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(64, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=384, bias=False)\n",
" (1): BatchNorm2d(384, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (12): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=576, bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (13): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=576, bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (14): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(96, 576, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(576, 576, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), groups=576, bias=False)\n",
" (1): BatchNorm2d(576, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(576, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (15): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(960, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (16): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(960, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (17): InvertedResidual(\n",
" (conv): Sequential(\n",
" (0): ConvBNReLU(\n",
" (0): Conv2d(160, 960, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (1): ConvBNReLU(\n",
" (0): Conv2d(960, 960, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=960, bias=False)\n",
" (1): BatchNorm2d(960, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" (2): Conv2d(960, 320, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" )\n",
" )\n",
" (18): ConvBNReLU(\n",
" (0): Conv2d(320, 1280, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
" (1): BatchNorm2d(1280, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
" (2): ReLU6(inplace=True)\n",
" )\n",
" )\n",
" (classifier): Sequential(\n",
" (0): Dropout(p=0.2, inplace=False)\n",
" (1): Linear(in_features=1280, out_features=1000, bias=True)\n",
" )\n",
")"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Downloading the model from the internet and printing info about it.\n",
"\n",
"model = torch.hub.load('pytorch/vision:v0.5.0', 'mobilenet_v2', pretrained=True)\n",
"model.eval()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# Download an example image from the pytorch website.\n",
"# Note: the upstream hub example uses `try: urllib.URLopener() ... except: ...`,\n",
"# but URLopener is Python 2 only, so that branch always fails and the bare\n",
"# except also hides real download errors. Call urllib.request directly instead.\n",
"import urllib.request\n",
"url, filename = (\"https://github.com/pytorch/hub/raw/master/dog.jpg\", \"dog.jpg\")\n",
"urllib.request.urlretrieve(url, filename)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The command above downloaded a file and put it in `dog.jpg` in the notebook directory.\n",
"\n",
"The dog is cute."
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"tensor([ 2.0977e+00, -1.7348e+00, -2.2355e+00, -2.9669e+00, -2.3805e+00,\n",
" 9.7397e-01, -1.6049e+00, 3.6914e+00, 6.3812e+00, -1.2929e+00,\n",
" -6.7555e+00, -3.3525e+00, -7.9619e+00, -4.4554e+00, -5.6423e+00,\n",
" -4.6624e+00, -1.9577e+00, -3.5811e-01, -1.2812e+00, -4.6707e+00,\n",
" -3.2935e+00, -2.5674e+00, -2.4351e+00, -1.3017e+00, -3.2453e+00,\n",
" -1.4237e+00, -1.2001e+00, 4.1274e-01, -1.6093e+00, 1.5871e+00,\n",
" 2.7725e-01, -6.2652e-01, -2.9734e-01, -3.8219e+00, -1.5450e+00,\n",
" -2.8976e+00, -5.6528e-01, -2.3938e+00, -3.3704e-01, 1.2809e+00,\n",
" -1.2516e+00, -2.6469e+00, -3.1011e+00, -2.2447e+00, -4.4385e-01,\n",
" -1.2620e+00, 8.2895e-01, -2.0436e+00, -6.6037e-01, -8.6523e-02,\n",
" 4.8967e-01, -1.7190e+00, -7.7943e-01, -1.1046e+00, -5.3857e-01,\n",
" -2.9254e+00, -1.9327e+00, -2.7273e+00, -6.0903e-01, -1.6802e+00,\n",
" 1.3443e+00, -4.2062e+00, -1.4768e+00, -4.5581e+00, -3.2726e+00,\n",
" -4.0086e+00, 1.5702e-01, -1.9921e+00, -7.4553e-01, -4.2230e+00,\n",
" -3.8855e+00, -9.4837e-01, -2.1373e+00, -3.5562e+00, -2.4602e+00,\n",
" -3.2339e+00, -3.1414e+00, -2.6786e+00, -3.4321e-01, 1.3021e+00,\n",
" -1.8081e+00, -6.9590e-01, 6.0993e-01, 1.0629e+00, 7.0008e-01,\n",
" -2.1399e+00, -8.3321e-01, -1.2712e+00, -2.9445e+00, 2.6159e+00,\n",
" -3.0859e+00, -3.9236e+00, -5.0966e+00, -2.6899e+00, -4.2541e+00,\n",
" -5.7496e+00, -1.4612e+00, -3.3379e+00, -5.1146e+00, 1.3199e-01,\n",
" 6.4283e-01, -3.8178e+00, -6.0668e-01, -3.8236e+00, 8.3375e+00,\n",
" -2.0877e-01, 1.5727e+00, -3.2569e+00, -1.3088e+00, -4.3541e+00,\n",
" -1.8997e+00, -4.4558e+00, -2.6055e+00, -4.8200e-01, 6.5847e-01,\n",
" -1.1139e+00, -1.9922e+00, -2.9861e+00, 6.3541e-02, -4.1181e-02,\n",
" -4.9799e+00, -3.5623e-01, 6.9234e-02, 2.6185e-02, 1.0603e+00,\n",
" -3.7500e+00, -2.2764e+00, -1.1260e+00, -3.6557e+00, -1.0141e+00,\n",
" -3.8173e+00, -2.6216e+00, 8.1060e-01, -4.8440e+00, -1.8047e-01,\n",
" -3.7860e+00, -2.7942e+00, -4.0452e+00, -2.7590e+00, -7.6154e+00,\n",
" -6.3294e+00, -3.7503e+00, -4.7911e+00, -2.9558e+00, -1.1693e+00,\n",
" -1.1443e+00, 4.1418e-01, -2.1690e+00, -2.2886e+00, -1.6754e+00,\n",
" -2.3970e+00, 6.1470e+00, 6.3288e+00, 4.1129e+00, 5.7730e+00,\n",
" 1.3963e+00, 3.3142e-01, 8.0281e+00, 1.5755e+00, -1.1558e-02,\n",
" 3.4568e-02, -7.4600e-01, 5.0004e-01, -1.5472e+00, -2.2847e+00,\n",
" -3.6398e+00, -6.4526e-01, -2.2491e+00, 1.6223e-01, 4.6008e+00,\n",
" 3.4056e+00, 7.1302e-02, -1.5552e+00, 5.0138e+00, 5.8507e+00,\n",
" 1.5491e+00, -2.0445e+00, 2.4170e+00, -2.4798e+00, 1.2948e+00,\n",
" 1.9327e+00, -2.7864e+00, 1.2956e+00, 2.8686e-01, 2.5281e+00,\n",
" 4.3597e+00, 5.6367e+00, -6.1374e-01, 3.3545e+00, -2.7333e-01,\n",
" 2.3449e+00, -1.3377e+00, 5.4958e+00, 4.5115e+00, 2.6891e-01,\n",
" 1.1485e+00, -1.2563e-01, 1.1168e+00, 6.3621e-02, 6.5230e+00,\n",
" 2.4672e+00, 4.8819e-01, -3.3207e-01, 9.2125e+00, 1.6410e+00,\n",
" 1.3188e+00, 1.9943e-01, 4.5677e+00, 2.1875e+00, 2.2087e-01,\n",
" -2.4104e+00, 6.0055e-01, 3.0084e+00, -5.4444e-01, -1.2983e+00,\n",
" 2.8078e+00, 1.9668e+00, 2.0604e+00, 7.7009e-01, 1.6875e+00,\n",
" 1.9477e+00, -7.9099e-01, 7.2126e+00, 9.0092e+00, 8.0076e+00,\n",
" 2.3563e+00, 3.7544e+00, 5.7382e+00, 3.8081e+00, 6.5486e+00,\n",
" 9.7228e+00, 1.0013e+01, 9.5305e+00, 2.3904e+00, -5.0377e-01,\n",
" 5.1103e+00, 5.7608e-01, 1.5447e+00, 1.2857e+00, 3.5652e+00,\n",
" 2.5450e+00, 1.4888e+00, -8.1306e-01, -2.7417e+00, 8.7089e-01,\n",
" -5.8501e-01, -4.0766e-01, 3.2671e+00, 9.6768e+00, 8.9972e+00,\n",
" 9.2585e+00, 1.4055e+00, 1.1183e-01, 2.6599e+00, -1.4223e+00,\n",
" 1.1165e+00, 5.2566e+00, 9.9246e+00, 1.4355e+01, 1.1880e+01,\n",
" 5.9999e+00, 1.0196e+01, -3.5385e-01, 8.2910e+00, 6.0059e+00,\n",
" 6.8347e-01, 4.1931e-01, 2.0777e+00, -5.5983e-01, 4.8030e+00,\n",
" 9.0072e+00, 1.7113e+00, 2.3010e+00, 4.5991e+00, 3.6477e+00,\n",
" -9.0076e-01, 8.6038e-01, 4.4647e+00, 2.8593e+00, 9.2391e+00,\n",
" 3.4026e+00, 3.9940e+00, 3.7692e+00, 7.0309e+00, 3.9052e+00,\n",
" 4.3277e+00, -3.0220e-01, 3.9064e+00, 1.5825e-01, 1.4127e+00,\n",
" -2.0500e+00, -1.2154e+00, 1.4670e+00, -1.4580e+00, -1.5738e+00,\n",
" -1.3263e+00, 9.2756e-02, -2.6180e+00, -1.4926e-01, -7.5718e-01,\n",
" -5.3535e+00, -3.6209e+00, -2.3243e+00, -1.6823e+00, -3.7550e+00,\n",
" -3.8941e+00, -7.9255e-01, -1.5161e+00, -5.7227e+00, -2.8694e+00,\n",
" -7.2898e-01, -4.1100e-01, -7.2024e-01, 3.7914e-02, -1.0243e+00,\n",
" -3.3016e+00, -3.5730e+00, -1.9918e+00, -1.6733e+00, -5.8971e+00,\n",
" -5.3597e+00, -3.9750e+00, -3.6543e+00, -4.1491e+00, -2.9045e+00,\n",
" -1.0445e+00, -2.6859e+00, -1.9066e+00, -2.3439e+00, -6.2355e-01,\n",
" 3.5224e+00, 6.2726e+00, 8.5430e+00, 5.0669e+00, 1.8273e+00,\n",
" 4.6736e+00, 8.4476e-01, -1.9572e-02, 3.3690e+00, -1.0283e-01,\n",
" -2.4612e+00, 2.4195e+00, -5.1352e-01, -3.5476e+00, -3.5470e+00,\n",
" 2.0303e-01, -1.3766e+00, -2.6088e+00, 2.4946e+00, -2.3945e+00,\n",
" -1.1865e+00, -4.9946e+00, -3.3072e+00, -4.2007e-01, -3.7238e+00,\n",
" 5.3880e+00, 3.4567e+00, 1.0361e+00, 3.7673e+00, 4.1483e+00,\n",
" -6.8158e-01, 2.9724e+00, -2.6524e-01, 8.0892e-01, -1.7007e+00,\n",
" -2.0444e+00, -2.9546e+00, -3.2660e+00, -8.8699e-01, -3.3023e+00,\n",
" -3.3806e-01, 1.2563e+00, -8.9535e-01, 1.4541e+00, 9.4996e-01,\n",
" -2.0507e+00, -4.4067e+00, 2.2796e+00, -6.8594e-01, -8.8652e-01,\n",
" 1.5170e+00, -3.8388e-01, -4.0742e-01, 9.6686e-01, 1.2590e+00,\n",
" -4.6555e+00, -6.3739e+00, 7.4363e-01, -1.3343e+00, -7.2346e-02,\n",
" -2.0423e+00, 3.7000e-01, -3.5627e+00, -1.9930e+00, -1.8647e+00,\n",
" 1.3201e+00, -3.7614e+00, -2.9709e+00, -1.0222e+00, -1.9434e+00,\n",
" -2.9945e+00, 2.2073e-01, 1.7212e+00, -2.3659e+00, -1.2634e+00,\n",
" -1.0104e+00, -4.0589e+00, 2.7514e-02, 1.4427e+00, 7.0132e-02,\n",
" -3.4568e+00, 1.0521e+00, 3.5129e+00, 6.4661e-01, -1.4407e+00,\n",
" -2.4270e-01, -3.5198e+00, -9.4175e-01, -1.0295e+00, 3.3603e-01,\n",
" 2.0016e-01, -8.9978e-02, 2.2656e-01, -9.6323e-01, -1.7674e+00,\n",
" 6.4673e-01, -3.3782e-02, 4.2802e+00, 5.3263e+00, 2.2384e+00,\n",
" -3.5597e+00, 1.1807e+00, 4.5926e-02, -2.6271e+00, 1.0758e-01,\n",
" 1.2795e+00, 1.5737e+00, 1.1931e+00, -2.4457e+00, -8.4555e-01,\n",
" 2.8623e+00, 2.8806e-01, -5.9178e-01, -9.3240e-01, 1.4194e+00,\n",
" -1.0869e+00, 8.0468e-01, 2.6026e+00, -1.2723e+00, -7.7224e-01,\n",
" 2.2809e+00, -1.4838e+00, 5.8849e-01, -9.0864e-01, -2.9350e+00,\n",
" -1.5003e+00, 2.7366e+00, 2.9118e+00, -2.5303e-01, -1.1096e+00,\n",
" 2.3799e+00, 7.6073e-01, 5.4272e+00, 4.6897e+00, -1.3155e-01,\n",
" -8.0122e-01, -3.1159e+00, -4.0464e+00, 6.5657e-01, 2.0462e+00,\n",
" 2.7152e+00, 2.0442e+00, 1.9868e+00, -1.4363e+00, -2.0505e+00,\n",
" -1.6228e+00, -7.0752e-01, 6.1674e-01, 2.5873e+00, 2.1843e+00,\n",
" -6.1809e-01, -1.9566e+00, -8.5768e-01, 7.1875e-01, 2.1852e+00,\n",
" 2.9276e-01, -2.9160e+00, 4.7730e-01, 2.0784e+00, 4.0771e+00,\n",
" -2.4195e-01, 1.3048e+00, -5.0637e-01, -3.4089e-01, 6.6665e-01,\n",
" -1.1453e+00, 1.2197e+00, -1.6476e+00, 1.0244e+00, -1.2751e-02,\n",
" -3.8730e+00, 1.3927e+00, 3.1080e+00, 2.1766e+00, 1.2397e-02,\n",
" 8.3346e-01, 6.3474e-01, 6.4802e-01, 1.8221e+00, -2.7608e+00,\n",
" -3.6870e+00, 1.3747e+00, 9.1805e-02, 8.2602e-01, -1.5124e+00,\n",
" 2.2673e+00, 1.3143e-01, -2.9091e+00, -1.5961e-01, 2.5890e+00,\n",
" -1.2447e+00, -1.2865e+00, 5.8338e+00, 1.6895e+00, -2.0549e-01,\n",
" -1.0211e+00, -2.4552e+00, -5.0936e-01, -5.2974e-01, -4.5987e-01,\n",
" 2.6499e-01, -2.9162e+00, -3.0222e+00, 2.2564e-01, 1.5582e+00,\n",
" -1.2585e-01, -1.8132e+00, 2.3501e+00, -3.3209e+00, 4.9566e+00,\n",
" -4.9510e+00, -7.4566e-01, 2.1571e+00, 7.6538e-01, 2.0662e+00,\n",
" 1.6266e+00, -1.2199e-01, -4.3637e+00, -2.1941e+00, -3.0787e-01,\n",
" -3.8238e+00, -1.6580e+00, 4.3411e+00, 2.7014e-01, -3.5598e-01,\n",
" -2.5363e-01, -4.9809e-01, 2.2959e+00, 2.2455e+00, 8.4955e-01,\n",
" -9.5632e-01, -1.0922e-02, 2.0119e-01, 6.2614e-01, -2.0728e+00,\n",
" -2.8902e+00, 1.5548e+00, 7.2548e-01, -1.0286e+00, 4.4618e-02,\n",
" -1.0328e+00, -6.1001e-02, 8.5735e-02, 2.3276e+00, 2.5550e+00,\n",
" 1.4773e+00, -2.1379e+00, 8.2632e-01, 9.0537e-01, 5.8717e-01,\n",
" 1.4047e-01, 1.2880e+00, -1.2447e+00, -3.0260e+00, 1.3479e+00,\n",
" 6.5378e-01, 1.0272e+00, 1.3767e+00, 1.0319e+00, 4.7779e-01,\n",
" 5.2349e-01, -8.8269e-01, -1.4962e+00, -1.1824e+00, 1.1074e+00,\n",
" -2.2532e+00, 2.1382e+00, 9.7307e-01, -1.0083e+00, -1.6575e+00,\n",
" -9.5758e-04, 1.6627e+00, -2.1514e+00, -2.3709e+00, -3.4100e+00,\n",
" -5.2928e-01, 1.9259e-01, -8.0650e-01, 1.6568e+00, -5.1726e-01,\n",
" -1.5838e-01, -8.9716e-01, -4.5829e+00, 1.4675e+00, -4.5201e-01,\n",
" 1.2436e+00, 6.0141e-01, -3.8649e-01, -1.7420e+00, -1.5792e+00,\n",
" 9.3821e-01, 4.2496e+00, 1.1979e+00, -2.4132e-01, -3.2378e+00,\n",
" -1.0954e+00, -9.6521e-02, 1.8683e+00, 1.1284e+00, 2.0860e+00,\n",
" 9.5167e-01, 1.8403e+00, 2.5347e-01, 1.8567e-01, -1.2812e+00,\n",
" -6.6936e-01, 2.0522e+00, 1.4342e+00, -2.2879e-01, -8.0857e-01,\n",
" 1.9737e+00, 1.1951e+00, -8.6991e-01, -6.7026e-01, 5.4058e-01,\n",
" -4.6110e-01, 2.6557e+00, -1.3025e+00, -1.6542e+00, -1.3156e+00,\n",
" 3.2369e-01, -5.4370e-01, 1.6666e+00, 4.5700e+00, -2.3573e+00,\n",
" 1.2469e+00, 1.1997e+00, 4.0686e+00, -7.7598e-03, -1.1327e+00,\n",
" 1.3139e+00, -3.5416e-01, -6.1601e-01, -3.9956e+00, 1.6335e+00,\n",
" -1.0202e+00, -4.5716e-01, 5.7760e-01, -3.4985e+00, -1.1559e-01,\n",
" -4.9487e-01, 6.6963e-01, -6.2700e-01, 1.0441e+00, 3.0509e+00,\n",
" -3.1207e-01, 2.7649e+00, 6.2701e-01, -1.7978e+00, -2.0636e+00,\n",
" 6.7859e-01, 1.0197e-01, -1.0705e+00, 3.0527e-01, -1.7576e+00,\n",
" 1.1912e+00, -2.5650e+00, -2.0395e+00, -2.3384e-02, -6.6742e-01,\n",
" -1.3196e+00, -1.5473e+00, 1.4445e+00, 4.8465e-01, -2.8597e-01,\n",
" 2.0573e+00, 1.8837e+00, -4.4886e-01, -5.9843e-01, 1.4961e+00,\n",
" 4.2709e+00, 4.7194e-02, -3.9729e+00, 1.6866e+00, 1.1838e-01,\n",
" -2.1901e+00, -1.4564e+00, 6.3495e-01, -6.9224e-02, -1.1540e-01,\n",
" -2.6747e+00, -1.8754e+00, -3.3249e-02, -1.3887e+00, -5.8239e-01,\n",
" -2.6332e+00, 2.9077e+00, 7.7976e-01, 1.2104e+00, 1.8161e+00,\n",
" -1.2151e+00, 1.0008e+00, 2.1955e+00, 3.8305e+00, -1.8397e+00,\n",
" 5.7438e-01, -3.9989e+00, -2.8352e-01, 2.3984e+00, -1.1624e+00,\n",
" 7.7795e-01, 4.9292e+00, -1.8727e+00, 4.6069e+00, -4.2315e+00,\n",
" 2.9490e-01, -2.9247e+00, -4.1031e-01, -2.6956e-01, -3.2782e+00,\n",
" 5.1224e-02, -1.1451e+00, 1.9002e+00, -3.3774e+00, 3.5610e+00,\n",
" -5.8406e-01, -2.5784e-01, 2.5958e+00, 4.8148e-01, 3.4009e+00,\n",
" 1.9466e+00, 4.3969e+00, 3.1025e+00, 1.8707e+00, 2.3226e+00,\n",
" -1.7758e+00, 4.5991e+00, -3.1057e-01, 2.5523e+00, -4.3005e-01,\n",
" 1.1663e+00, 3.7423e+00, -3.3202e+00, 2.9863e+00, 2.3991e+00,\n",
" 1.8163e+00, 6.2833e-01, -3.0631e+00, 1.3832e+00, 2.3446e-01,\n",
" 4.5703e-01, -5.2095e-01, -6.3540e-01, -2.3105e+00, -9.3954e-02,\n",
" 4.7082e-01, -4.4529e-01, 3.7371e-01, -1.8174e+00, -1.6152e+00,\n",
" -3.7139e-01, -5.7884e-01, 1.7545e+00, 7.7587e-01, -7.8316e-02,\n",
" 1.6373e-02, -2.9957e+00, 1.5097e+00, -1.9589e+00, -2.1457e+00,\n",
" 8.6147e-01, 1.3731e+00, 4.8385e+00, 1.1895e+00, -5.1667e-02,\n",
" -6.9186e-01, -2.4000e+00, 2.2452e+00, -2.8816e+00, 2.9604e-01,\n",
" -2.4871e-02, -1.6365e+00, 5.4108e-01, 7.9194e-01, -4.7307e-01,\n",
" 3.3787e+00, 3.1171e-01, -4.7706e-01, 4.7993e-01, -2.1351e+00,\n",
" -1.1842e+00, 2.8761e+00, -2.5128e+00, 6.7475e-01, 4.2304e-01,\n",
" -1.5113e-01, -2.4046e-01, 2.6086e+00, 2.2590e+00, -1.3191e+00,\n",
" -1.4585e+00, -1.7858e-01, 2.4934e-02, 3.6163e+00, 1.1935e+00,\n",
" 1.5226e+00, 1.1416e+00, 1.3794e+00, 1.2284e+00, -3.1586e+00,\n",
" 1.8730e+00, 5.6497e-01, 3.5441e-01, 3.7591e+00, -7.5653e-01,\n",
" 1.4521e+00, 9.2728e-01, 1.7928e+00, 1.3086e+00, 3.3281e-01,\n",
" 2.5021e+00, 1.2359e-01, -1.9837e+00, -1.9212e+00, 2.2389e+00,\n",
" 1.7770e+00, -1.0251e+00, 1.1154e+00, -5.6650e-01, 8.5459e-01,\n",
" -9.2451e-01, 1.7175e+00, 5.5464e+00, -1.1396e+00, -1.0508e+00,\n",
" 9.4548e-01, -1.1248e+00, -4.1178e+00, -1.5001e+00, -2.4440e+00,\n",
" -2.2460e+00, 2.9526e-01, 6.0924e-02, -6.5618e-02, -3.6811e-01,\n",
" -3.9457e+00, -2.5774e-01, 1.0757e+00, 2.7218e-01, 1.2959e-01,\n",
" 2.9834e+00, 1.5601e+00, -1.3121e+00, -3.4189e+00, -4.8114e+00,\n",
" -7.6683e-02, 2.9641e+00, -1.4930e+00, -1.4022e+00, 3.6878e+00,\n",
" 2.2555e+00, 2.3702e-01, 3.8400e+00, -5.7365e-01, -2.9713e+00,\n",
" -1.8238e+00, -2.4696e-01, -5.4835e+00, -1.7653e+00, -1.3388e-01,\n",
" -3.1702e+00, -1.5837e+00, -6.3625e-01, -4.9745e-01, -1.8403e+00,\n",
" 1.0520e+00, 1.6680e+00, 1.8475e+00, 4.0332e+00, 1.3918e+00,\n",
" -3.2777e+00, -4.6016e-01, 1.0744e+00, -3.8841e-01, 2.3637e+00,\n",
" -5.5233e-02, -1.5773e-01, 1.7894e+00, 8.5378e-01, 6.7841e-01,\n",
" 7.2238e-01, 7.0537e-01, 3.4570e+00, 3.3763e-01, -1.2732e+00,\n",
" -1.6014e+00, -1.1200e-01, -5.0247e+00, -9.6371e-01, -2.9871e-01,\n",
" -5.2167e-01, -2.2425e+00, -3.0319e+00, -1.8120e-01, -1.8274e+00,\n",
" -2.9185e+00, -5.3206e+00, -4.1687e-01, -1.1471e+00, 4.0239e+00,\n",
" 1.6161e+00, -7.2602e-01, 2.2094e+00, -3.8634e+00, -2.6027e-01,\n",
" 1.3907e+00, 2.0412e+00, 1.4310e+00, -5.5395e-01, 8.4763e-01,\n",
" 1.2557e+00, -2.3636e+00, 5.4899e-01, 2.3557e-01, -2.1998e+00,\n",
" -3.3631e+00, -2.3922e+00, -1.7042e+00, 2.6087e+00, 1.6107e+00,\n",
" -3.4292e-01, -4.9724e-01, -3.1873e+00, -4.1049e-01, -1.3927e+00,\n",
" -3.3978e-01, -1.3094e+00, 3.4262e-01, 2.9870e+00, -1.0335e+00,\n",
" -5.0580e-01, 7.1299e-01, -1.3783e+00, -2.2482e+00, -3.8999e+00,\n",
" -2.0363e+00, 1.1213e+00, -4.8775e+00, 3.2949e-01, 1.5133e+00,\n",
" 1.1563e+00, 2.0571e+00, 5.9265e-01, -3.9987e+00, -7.2273e-01,\n",
" 2.9365e+00, -1.7806e+00, 1.8060e+00, -2.9915e-01, -1.3355e+00,\n",
" 4.0328e-01, 1.8246e-02, -1.7064e-01, -2.5046e+00, -2.3489e-01,\n",
" -5.1013e-01, -7.8851e-01, 1.9875e+00, -2.5939e-01, -2.7370e+00,\n",
" 6.9350e-01, -1.7628e+00, -5.1119e-01, -6.8332e+00, -4.1562e+00,\n",
" -2.5831e+00, -1.4274e+00, -3.0766e+00, 1.6652e+00, 3.3317e+00])\n",
"tensor([3.9432e-06, 8.5398e-08, 5.1760e-08, 2.4907e-08, 4.4774e-08, 1.2819e-06,\n",
" 9.7241e-08, 1.9409e-05, 2.8588e-04, 1.3284e-07, 5.6362e-10, 1.6938e-08,\n",
" 1.6868e-10, 5.6221e-09, 1.7157e-09, 4.5706e-09, 6.8331e-08, 3.3832e-07,\n",
" 1.3440e-07, 4.5330e-09, 1.7967e-08, 3.7139e-08, 4.2395e-08, 1.3168e-07,\n",
" 1.8854e-08, 1.1656e-07, 1.4577e-07, 7.3131e-07, 9.6812e-08, 2.3665e-06,\n",
" 6.3864e-07, 2.5867e-07, 3.5951e-07, 1.0593e-08, 1.0324e-07, 2.6695e-08,\n",
" 2.7501e-07, 4.4179e-08, 3.4552e-07, 1.7424e-06, 1.3845e-07, 3.4300e-08,\n",
" 2.1779e-08, 5.1287e-08, 3.1052e-07, 1.3701e-07, 1.1088e-06, 6.2707e-08,\n",
" 2.5006e-07, 4.4388e-07, 7.8978e-07, 8.6757e-08, 2.2200e-07, 1.6037e-07,\n",
" 2.8245e-07, 2.5964e-08, 7.0061e-08, 3.1653e-08, 2.6324e-07, 9.0185e-08,\n",
" 1.8563e-06, 7.2130e-09, 1.1053e-07, 5.0733e-09, 1.8347e-08, 8.7892e-09,\n",
" 5.6629e-07, 6.6024e-08, 2.2965e-07, 7.0927e-09, 9.9400e-09, 1.8749e-07,\n",
" 5.7098e-08, 1.3817e-08, 4.1344e-08, 1.9071e-08, 2.0919e-08, 3.3230e-08,\n",
" 3.4340e-07, 1.7796e-06, 7.9357e-08, 2.4134e-07, 8.9071e-07, 1.4011e-06,\n",
" 9.7474e-07, 5.6951e-08, 2.1037e-07, 1.3576e-07, 2.5472e-08, 6.6207e-06,\n",
" 2.2114e-08, 9.5691e-09, 2.9609e-09, 3.2859e-08, 6.8756e-09, 1.5411e-09,\n",
" 1.1226e-07, 1.7188e-08, 2.9081e-09, 5.5229e-07, 9.2050e-07, 1.0637e-08,\n",
" 2.6386e-07, 1.0575e-08, 2.0220e-03, 3.9281e-07, 2.3327e-06, 1.8639e-08,\n",
" 1.3075e-07, 6.2214e-09, 7.2417e-08, 5.6197e-09, 3.5750e-08, 2.9889e-07,\n",
" 9.3501e-07, 1.5889e-07, 6.6013e-08, 2.4436e-08, 5.1575e-07, 4.6447e-07,\n",
" 3.3275e-09, 3.3895e-07, 5.1870e-07, 4.9684e-07, 1.3974e-06, 1.1383e-08,\n",
" 4.9683e-08, 1.5698e-07, 1.2508e-08, 1.7556e-07, 1.0642e-08, 3.5180e-08,\n",
" 1.0886e-06, 3.8116e-09, 4.0408e-07, 1.0980e-08, 2.9603e-08, 8.4727e-09,\n",
" 3.0664e-08, 2.3852e-10, 8.6299e-10, 1.1379e-08, 4.0188e-09, 2.5185e-08,\n",
" 1.5032e-07, 1.5414e-07, 7.3236e-07, 5.5320e-08, 4.9083e-08, 9.0621e-08,\n",
" 4.4040e-08, 2.2618e-04, 2.7128e-04, 2.9585e-05, 1.5561e-04, 1.9554e-06,\n",