From fbbb034ca462bd0ffba3055ad0ebb80d094916a8 Mon Sep 17 00:00:00 2001 From: Jean-Luc Parouty <Jean-Luc.Parouty@simap.grenoble-inp.fr> Date: Tue, 31 Oct 2023 18:32:27 +0100 Subject: [PATCH] Major update to include pytorch and lightning notebooks --- AE/01-Prepare-MNIST-dataset.ipynb | 4 +- AE/02-AE-with-MNIST.ipynb | 4 +- AE/03-AE-with-MNIST-post.ipynb | 4 +- AE/04-ExtAE-with-MNIST.ipynb | 4 +- AE/05-ExtAE-with-MNIST.ipynb | 4 +- {BHPD => BHPD.Keras}/01-DNN-Regression.ipynb | 6 +- .../02-DNN-Regression-Premium.ipynb | 6 +- .../01-DNN-Regression_PyTorch.ipynb | 6 +- .../BostonHousing.csv | 0 .../modules}/fidle_pwk_additional.py | 0 BHPD_PyTorch/.gitkeep | 0 DCGAN-PyTorch/01-DCGAN-PL.ipynb | 4 +- DCGAN/01-DCGAN-Draw-me-a-sheep.ipynb | 4 +- DCGAN/02-WGANGP-Draw-me-a-sheep.ipynb | 4 +- DDPM/01-ddpm.ipynb | 4 +- DRL/FIDLE_DQNfromScratch.ipynb | 2 +- DRL/FIDLE_rl_baselines_zoo.ipynb | 2 +- GTSRB/01-Preparation-of-data.ipynb | 4 +- GTSRB/02-First-convolutions.ipynb | 4 +- GTSRB/03-Tracking-and-visualizing.ipynb | 4 +- GTSRB/04-Data-augmentation.ipynb | 4 +- GTSRB/05-Full-convolutions.ipynb | 4 +- GTSRB/06-Notebook-as-a-batch.ipynb | 4 +- GTSRB/07-Show-report.ipynb | 4 +- IMDB/01-One-hot-encoding.ipynb | 4 +- IMDB/02-Keras-embedding.ipynb | 4 +- IMDB/03-Prediction.ipynb | 4 +- IMDB/04-Show-vectors.ipynb | 4 +- IMDB/05-LSTM-Keras.ipynb | 4 +- LinearReg/01-Linear-Regression.ipynb | 4 +- LinearReg/02-Gradient-descent.ipynb | 4 +- LinearReg/03-Polynomial-Regression.ipynb | 4 +- LinearReg/04-Logistic-Regression.ipynb | 4 +- {MNIST => MNIST.Keras}/01-DNN-MNIST.ipynb | 6 +- {MNIST => MNIST.Keras}/02-CNN-MNIST.ipynb | 6 +- .../01-DNN-MNIST_Lightning.ipynb | 6 +- .../02-CNN-MNIST_Lightning.ipynb | 6 +- .../01-DNN-MNIST_PyTorch.ipynb | 6 +- .../modules}/fidle_pwk_additional.py | 0 MNIST_Lightning/.gitkeep | 0 MNIST_PyTorch/.gitkeep | 0 Misc/{Numpy.ipynb => 00-Numpy.ipynb} | 4 +- ...ns.ipynb => 01-Activation-Functions.ipynb} | 4 +- ...ing-pandas.ipynb => 02-Using-pandas.ipynb} | 4 +- Misc/03-Using-Pytorch.ipynb | 2753 +++++++++++++++++ ...board.ipynb => 04-Using-Tensorboard.ipynb} | 4 +- ...Scratchbook.ipynb => 99-Scratchbook.ipynb} | 4 +- ...rentissages-rapides-et-Optimisations.ipynb | 2 +- .../01-Simple-Perceptron.ipynb | 4 +- README.ipynb | 82 +- README.md | 70 +- SYNOP/LADYB1-Ladybug.ipynb | 4 +- SYNOP/SYNOP1-Preparation-of-data.ipynb | 4 +- SYNOP/SYNOP2-First-predictions.ipynb | 4 +- SYNOP/SYNOP3-12h-predictions.ipynb | 4 +- Transformers/01-Distilbert.ipynb | 2 +- Transformers/02-distilbert_colab.ipynb | 2 +- VAE/01-VAE-with-MNIST.ipynb | 4 +- VAE/02-VAE-with-MNIST.ipynb | 4 +- VAE/03-VAE-with-MNIST-post.ipynb | 4 +- .../01-DNN-Wine-Regression.ipynb | 6 +- .../01-DNN-Wine-Regression-lightning.ipynb | 10 +- .../modules}/data_load.py | 0 WineQuality-DNN_Reg-lightning/.gitkeep | 0 fidle/about.yml | 39 +- fidle/ci/default.yml | 81 +- fidle/img/00-Fidle-Anaconda.svg | 1 - fidle/img/00-Fidle-a-distance-01.svg | 1 - fidle/img/00-Fidle-header-01.svg | 1 - fidle/img/00-Fidle-logo-00.svg | 1 - fidle/img/00-Fidle-logo-01.svg | 1 - fidle/img/00-Fidle-titre-01.svg | 1 - fidle/img/ender.svg | 30 + fidle/img/header.svg | 125 + ...logo-01-80px.svg => logo-paysage-80px.svg} | 0 fidle/img/logo-paysage.svg | 27 + fidle/img/title.svg | 71 + 77 files changed, 3297 insertions(+), 213 deletions(-) rename {BHPD => BHPD.Keras}/01-DNN-Regression.ipynb (98%) rename {BHPD => BHPD.Keras}/02-DNN-Regression-Premium.ipynb (98%) rename {BHPD_PyTorch => BHPD.PyTorch}/01-DNN-Regression_PyTorch.ipynb (98%) rename {BHPD_PyTorch => 
BHPD.PyTorch}/BostonHousing.csv (100%) rename {BHPD_PyTorch => BHPD.PyTorch/modules}/fidle_pwk_additional.py (100%) delete mode 100644 BHPD_PyTorch/.gitkeep rename {MNIST => MNIST.Keras}/01-DNN-MNIST.ipynb (97%) rename {MNIST => MNIST.Keras}/02-CNN-MNIST.ipynb (97%) rename {MNIST_Lightning => MNIST.Lightning}/01-DNN-MNIST_Lightning.ipynb (98%) rename {MNIST_Lightning => MNIST.Lightning}/02-CNN-MNIST_Lightning.ipynb (98%) rename {MNIST_PyTorch => MNIST.PyTorch}/01-DNN-MNIST_PyTorch.ipynb (98%) rename {MNIST_PyTorch => MNIST.PyTorch/modules}/fidle_pwk_additional.py (100%) delete mode 100644 MNIST_Lightning/.gitkeep delete mode 100644 MNIST_PyTorch/.gitkeep rename Misc/{Numpy.ipynb => 00-Numpy.ipynb} (98%) rename Misc/{Activation-Functions.ipynb => 01-Activation-Functions.ipynb} (97%) rename Misc/{Using-pandas.ipynb => 02-Using-pandas.ipynb} (95%) create mode 100644 Misc/03-Using-Pytorch.ipynb rename Misc/{Using-Tensorboard.ipynb => 04-Using-Tensorboard.ipynb} (93%) rename Misc/{Scratchbook.ipynb => 99-Scratchbook.ipynb} (97%) rename {IRIS => Perceptron}/01-Simple-Perceptron.ipynb (97%) rename BHPD/03-DNN-Wine-Regression.ipynb => Wine.Keras/01-DNN-Wine-Regression.ipynb (98%) rename WineQuality-DNN_Reg-lightning/03-DNN-Wine-Regression-lightning.ipynb => Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb (97%) rename {WineQuality-DNN_Reg-lightning => Wine.Lightning/modules}/data_load.py (100%) delete mode 100644 WineQuality-DNN_Reg-lightning/.gitkeep delete mode 100644 fidle/img/00-Fidle-Anaconda.svg delete mode 100755 fidle/img/00-Fidle-a-distance-01.svg delete mode 100755 fidle/img/00-Fidle-header-01.svg delete mode 100755 fidle/img/00-Fidle-logo-00.svg delete mode 100755 fidle/img/00-Fidle-logo-01.svg delete mode 100755 fidle/img/00-Fidle-titre-01.svg create mode 100644 fidle/img/ender.svg create mode 100644 fidle/img/header.svg rename fidle/img/{00-Fidle-logo-01-80px.svg => logo-paysage-80px.svg} (100%) create mode 100644 fidle/img/logo-paysage.svg create mode 100644 fidle/img/title.svg diff --git a/AE/01-Prepare-MNIST-dataset.ipynb b/AE/01-Prepare-MNIST-dataset.ipynb index 15917f6..1ae11aa 100644 --- a/AE/01-Prepare-MNIST-dataset.ipynb +++ b/AE/01-Prepare-MNIST-dataset.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [AE1] - Prepare a noisy MNIST dataset\n", "<!-- DESC --> Episode 1: Preparation of a noisy MNIST dataset\n", @@ -210,7 +210,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/AE/02-AE-with-MNIST.ipynb b/AE/02-AE-with-MNIST.ipynb index 894707f..9f76f27 100644 --- a/AE/02-AE-with-MNIST.ipynb +++ b/AE/02-AE-with-MNIST.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [AE2] - Building and training an AE denoiser model\n", "<!-- DESC --> Episode 1 : Construction of a denoising autoencoder and training of it with a noisy MNIST dataset.\n", @@ -404,7 +404,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff 
--git a/AE/03-AE-with-MNIST-post.ipynb b/AE/03-AE-with-MNIST-post.ipynb index e75e413..1c83739 100644 --- a/AE/03-AE-with-MNIST-post.ipynb +++ b/AE/03-AE-with-MNIST-post.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [AE3] - Playing with our denoiser model\n", "<!-- DESC --> Episode 2 : Using the previously trained autoencoder to denoise data\n", @@ -273,7 +273,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/AE/04-ExtAE-with-MNIST.ipynb b/AE/04-ExtAE-with-MNIST.ipynb index 86c2c21..c1e7ced 100644 --- a/AE/04-ExtAE-with-MNIST.ipynb +++ b/AE/04-ExtAE-with-MNIST.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [AE4] - Denoiser and classifier model\n", "<!-- DESC --> Episode 4 : Construction of a denoiser and classifier model\n", @@ -500,7 +500,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/AE/05-ExtAE-with-MNIST.ipynb b/AE/05-ExtAE-with-MNIST.ipynb index 75309a3..f695e3a 100644 --- a/AE/05-ExtAE-with-MNIST.ipynb +++ b/AE/05-ExtAE-with-MNIST.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [AE5] - Advanced denoiser and classifier model\n", "<!-- DESC --> Episode 5 : Construction of an advanced denoiser and classifier model\n", @@ -532,7 +532,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/BHPD/01-DNN-Regression.ipynb b/BHPD.Keras/01-DNN-Regression.ipynb similarity index 98% rename from BHPD/01-DNN-Regression.ipynb rename to BHPD.Keras/01-DNN-Regression.ipynb index c6e704f..2e8e30e 100644 --- a/BHPD/01-DNN-Regression.ipynb +++ b/BHPD.Keras/01-DNN-Regression.ipynb @@ -6,10 +6,10 @@ "tags": [] }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "\n", - "# <!-- TITLE --> [BHPD1] - Regression with a Dense Network (DNN)\n", + "# <!-- TITLE --> [KBHPD1] - Regression with a Dense Network (DNN)\n", "<!-- DESC --> Simple example of a regression with the dataset Boston Housing Prices Dataset (BHPD)\n", "<!-- AUTHOR : Jean-Luc Parouty (CNRS/SIMaP) -->\n", "\n", @@ -413,7 +413,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/BHPD/02-DNN-Regression-Premium.ipynb b/BHPD.Keras/02-DNN-Regression-Premium.ipynb similarity index 98% rename from BHPD/02-DNN-Regression-Premium.ipynb rename to BHPD.Keras/02-DNN-Regression-Premium.ipynb index 4e48b2c..389337b 100644 --- a/BHPD/02-DNN-Regression-Premium.ipynb +++ 
b/BHPD.Keras/02-DNN-Regression-Premium.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [BHPD2] - Regression with a Dense Network (DNN) - Advanced code\n", + "# <!-- TITLE --> [KBHPD2] - Regression with a Dense Network (DNN) - Advanced code\n", " <!-- DESC --> A more advanced implementation of the precedent example\n", " <!-- AUTHOR : Jean-Luc Parouty (CNRS/SIMaP) -->\n", "\n", @@ -456,7 +456,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/BHPD_PyTorch/01-DNN-Regression_PyTorch.ipynb b/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb similarity index 98% rename from BHPD_PyTorch/01-DNN-Regression_PyTorch.ipynb rename to BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb index 85213ff..e75b7ad 100644 --- a/BHPD_PyTorch/01-DNN-Regression_PyTorch.ipynb +++ b/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "\n", "# <!-- TITLE --> [PBHPD1] - Regression with a Dense Network (DNN)\n", @@ -67,7 +67,7 @@ "\n", "import pandas as pd\n", "\n", - "from fidle_pwk_additional import convergence_history_MSELoss\n", + "from modules.fidle_pwk_additional import convergence_history_MSELoss\n", "\n", "import fidle\n", "\n", @@ -412,7 +412,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/BHPD_PyTorch/BostonHousing.csv b/BHPD.PyTorch/BostonHousing.csv similarity index 100% rename from BHPD_PyTorch/BostonHousing.csv rename to BHPD.PyTorch/BostonHousing.csv diff --git a/BHPD_PyTorch/fidle_pwk_additional.py b/BHPD.PyTorch/modules/fidle_pwk_additional.py similarity index 100% rename from BHPD_PyTorch/fidle_pwk_additional.py rename to BHPD.PyTorch/modules/fidle_pwk_additional.py diff --git a/BHPD_PyTorch/.gitkeep b/BHPD_PyTorch/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/DCGAN-PyTorch/01-DCGAN-PL.ipynb b/DCGAN-PyTorch/01-DCGAN-PL.ipynb index af0e5cc..fb29121 100644 --- a/DCGAN-PyTorch/01-DCGAN-PL.ipynb +++ b/DCGAN-PyTorch/01-DCGAN-PL.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SHEEP3] - A DCGAN to Draw a Sheep, with Pytorch Lightning\n", "<!-- DESC --> Episode 1 : Draw me a sheep, revisited with a DCGAN, writing in Pytorch Lightning\n", @@ -434,7 +434,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/DCGAN/01-DCGAN-Draw-me-a-sheep.ipynb b/DCGAN/01-DCGAN-Draw-me-a-sheep.ipynb index e5b300a..ff676d8 100644 --- a/DCGAN/01-DCGAN-Draw-me-a-sheep.ipynb +++ b/DCGAN/01-DCGAN-Draw-me-a-sheep.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + 
"<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SHEEP1] - A first DCGAN to Draw a Sheep\n", "<!-- DESC --> Episode 1 : Draw me a sheep, revisited with a DCGAN\n", @@ -382,7 +382,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/DCGAN/02-WGANGP-Draw-me-a-sheep.ipynb b/DCGAN/02-WGANGP-Draw-me-a-sheep.ipynb index 31e1511..dee76ea 100644 --- a/DCGAN/02-WGANGP-Draw-me-a-sheep.ipynb +++ b/DCGAN/02-WGANGP-Draw-me-a-sheep.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SHEEP2] - A WGAN-GP to Draw a Sheep\n", "<!-- DESC --> Episode 2 : Draw me a sheep, revisited with a WGAN-GP\n", @@ -386,7 +386,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/DDPM/01-ddpm.ipynb b/DDPM/01-ddpm.ipynb index 06a7936..3989ead 100755 --- a/DDPM/01-ddpm.ipynb +++ b/DDPM/01-ddpm.ipynb @@ -6,7 +6,7 @@ "id": "756b572d", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [DDPM1] - Fashion MNIST Generation with DDPM\n", "<!-- DESC --> Diffusion Model example, to generate Fashion MNIST images.\n", @@ -792,7 +792,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/DRL/FIDLE_DQNfromScratch.ipynb b/DRL/FIDLE_DQNfromScratch.ipynb index 532d4c5..ac1a1e2 100755 --- a/DRL/FIDLE_DQNfromScratch.ipynb +++ b/DRL/FIDLE_DQNfromScratch.ipynb @@ -7,7 +7,7 @@ "id": "w_5p3EyVknLC" }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [DRL1] - Solving CartPole with DQN\n", "<!-- DESC --> Using a a Deep Q-Network to play CartPole - an inverted pendulum problem (PyTorch)\n", diff --git a/DRL/FIDLE_rl_baselines_zoo.ipynb b/DRL/FIDLE_rl_baselines_zoo.ipynb index 5436663..15262b9 100755 --- a/DRL/FIDLE_rl_baselines_zoo.ipynb +++ b/DRL/FIDLE_rl_baselines_zoo.ipynb @@ -6,7 +6,7 @@ "id": "XJy9QoDC7XA7" }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [DRL2] - RL Baselines3 Zoo: Training in Colab\n", "<!-- DESC --> Demo of Stable baseline3 with Colab\n", diff --git a/GTSRB/01-Preparation-of-data.ipynb b/GTSRB/01-Preparation-of-data.ipynb index d392488..19ca693 100644 --- a/GTSRB/01-Preparation-of-data.ipynb +++ b/GTSRB/01-Preparation-of-data.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB1] - Dataset analysis and preparation\n", "<!-- DESC --> Episode 1 : Analysis of the GTSRB dataset and creation of an enhanced dataset\n", @@ -634,7 +634,7 @@ "metadata": {}, 
"source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/02-First-convolutions.ipynb b/GTSRB/02-First-convolutions.ipynb index 426d516..272575b 100644 --- a/GTSRB/02-First-convolutions.ipynb +++ b/GTSRB/02-First-convolutions.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB2] - First convolutions\n", "<!-- DESC --> Episode 2 : First convolutions and first classification of our traffic signs\n", @@ -391,7 +391,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/03-Tracking-and-visualizing.ipynb b/GTSRB/03-Tracking-and-visualizing.ipynb index 2054dce..0983cdf 100644 --- a/GTSRB/03-Tracking-and-visualizing.ipynb +++ b/GTSRB/03-Tracking-and-visualizing.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB3] - Training monitoring\n", "<!-- DESC --> Episode 3 : Monitoring, analysis and check points during a training session\n", @@ -529,7 +529,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/04-Data-augmentation.ipynb b/GTSRB/04-Data-augmentation.ipynb index 51fbe3f..2a091d3 100644 --- a/GTSRB/04-Data-augmentation.ipynb +++ b/GTSRB/04-Data-augmentation.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB4] - Data augmentation \n", "<!-- DESC --> Episode 4 : Adding data by data augmentation when we lack it, to improve our results\n", @@ -448,7 +448,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/05-Full-convolutions.ipynb b/GTSRB/05-Full-convolutions.ipynb index 25d6c98..e665e56 100644 --- a/GTSRB/05-Full-convolutions.ipynb +++ b/GTSRB/05-Full-convolutions.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB5] - Full convolutions\n", "<!-- DESC --> Episode 5 : A lot of models, a lot of datasets and a lot of results.\n", @@ -478,7 +478,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/06-Notebook-as-a-batch.ipynb b/GTSRB/06-Notebook-as-a-batch.ipynb index f93f107..02b9820 100644 --- a/GTSRB/06-Notebook-as-a-batch.ipynb +++ b/GTSRB/06-Notebook-as-a-batch.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" 
src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB6] - Full convolutions as a batch\n", "<!-- DESC --> Episode 6 : To compute bigger, use your notebook in batch mode\n", @@ -175,7 +175,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/GTSRB/07-Show-report.ipynb b/GTSRB/07-Show-report.ipynb index 7f2aeb5..eaabf53 100644 --- a/GTSRB/07-Show-report.ipynb +++ b/GTSRB/07-Show-report.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GTSRB7] - Batch reports\n", "<!-- DESC --> Episode 7 : Displaying our jobs report, and the winner is...\n", @@ -147,7 +147,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/IMDB/01-One-hot-encoding.ipynb b/IMDB/01-One-hot-encoding.ipynb index e39ec99..538393f 100644 --- a/IMDB/01-One-hot-encoding.ipynb +++ b/IMDB/01-One-hot-encoding.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [IMDB1] - Sentiment analysis with hot-one encoding\n", "<!-- DESC --> A basic example of sentiment analysis with sparse encoding, using a dataset from Internet Movie Database (IMDB)\n", @@ -582,7 +582,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/IMDB/02-Keras-embedding.ipynb b/IMDB/02-Keras-embedding.ipynb index d658281..a20acee 100644 --- a/IMDB/02-Keras-embedding.ipynb +++ b/IMDB/02-Keras-embedding.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [IMDB2] - Sentiment analysis with text embedding\n", "<!-- DESC --> A very classical example of word embedding with a dataset from Internet Movie Database (IMDB)\n", @@ -401,7 +401,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/IMDB/03-Prediction.ipynb b/IMDB/03-Prediction.ipynb index ffcb5c8..9e88752 100644 --- a/IMDB/03-Prediction.ipynb +++ b/IMDB/03-Prediction.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [IMDB3] - Reload and reuse a saved model\n", "<!-- DESC --> Retrieving a saved model to perform a sentiment analysis (movie review)\n", @@ -269,7 +269,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git 
a/IMDB/04-Show-vectors.ipynb b/IMDB/04-Show-vectors.ipynb index 350a550..3c24918 100644 --- a/IMDB/04-Show-vectors.ipynb +++ b/IMDB/04-Show-vectors.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [IMDB4] - Reload embedded vectors\n", "<!-- DESC --> Retrieving embedded vectors from our trained model\n", @@ -246,7 +246,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/IMDB/05-LSTM-Keras.ipynb b/IMDB/05-LSTM-Keras.ipynb index 7c61576..c082791 100644 --- a/IMDB/05-LSTM-Keras.ipynb +++ b/IMDB/05-LSTM-Keras.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [IMDB5] - Sentiment analysis with a RNN network\n", "<!-- DESC --> Still the same problem, but with a network combining embedding and RNN\n", @@ -429,7 +429,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/LinearReg/01-Linear-Regression.ipynb b/LinearReg/01-Linear-Regression.ipynb index bcd6815..77780c5 100644 --- a/LinearReg/01-Linear-Regression.ipynb +++ b/LinearReg/01-Linear-Regression.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [LINR1] - Linear regression with direct resolution\n", "<!-- DESC --> Low-level implementation, using numpy, of a direct resolution for a linear regression\n", @@ -181,7 +181,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/LinearReg/02-Gradient-descent.ipynb b/LinearReg/02-Gradient-descent.ipynb index 0cb10ef..6683797 100644 --- a/LinearReg/02-Gradient-descent.ipynb +++ b/LinearReg/02-Gradient-descent.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [GRAD1] - Linear regression with gradient descent\n", "<!-- DESC --> Low level implementation of a solution by gradient descent. 
Basic and stochastic approach.\n", @@ -147,7 +147,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/LinearReg/03-Polynomial-Regression.ipynb b/LinearReg/03-Polynomial-Regression.ipynb index 1bf39cb..b5483e8 100644 --- a/LinearReg/03-Polynomial-Regression.ipynb +++ b/LinearReg/03-Polynomial-Regression.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [POLR1] - Complexity Syndrome\n", "<!-- DESC --> Illustration of the problem of complexity with the polynomial regression\n", @@ -219,7 +219,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/LinearReg/04-Logistic-Regression.ipynb b/LinearReg/04-Logistic-Regression.ipynb index e86c631..c4cbd96 100644 --- a/LinearReg/04-Logistic-Regression.ipynb +++ b/LinearReg/04-Logistic-Regression.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [LOGR1] - Logistic regression\n", "<!-- DESC --> Simple example of logistic regression with a sklearn solution\n", @@ -473,7 +473,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST/01-DNN-MNIST.ipynb b/MNIST.Keras/01-DNN-MNIST.ipynb similarity index 97% rename from MNIST/01-DNN-MNIST.ipynb rename to MNIST.Keras/01-DNN-MNIST.ipynb index 7ad1e4a..0d15647 100644 --- a/MNIST/01-DNN-MNIST.ipynb +++ b/MNIST.Keras/01-DNN-MNIST.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [MNIST1] - Simple classification with DNN\n", + "# <!-- TITLE --> [KMNIST1] - Simple classification with DNN\n", "<!-- DESC --> An example of classification using a dense neural network for the famous MNIST dataset\n", "<!-- AUTHOR : Jean-Luc Parouty (CNRS/SIMaP) -->\n", "\n", @@ -322,7 +322,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST/02-CNN-MNIST.ipynb b/MNIST.Keras/02-CNN-MNIST.ipynb similarity index 97% rename from MNIST/02-CNN-MNIST.ipynb rename to MNIST.Keras/02-CNN-MNIST.ipynb index db913b6..d715ea3 100644 --- a/MNIST/02-CNN-MNIST.ipynb +++ b/MNIST.Keras/02-CNN-MNIST.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [MNIST2] - Simple classification with CNN\n", + "# <!-- TITLE --> [KMNIST2] - Simple classification with CNN\n", "<!-- DESC --> An example of classification using a convolutional neural network for the famous MNIST dataset\n", "<!-- AUTHOR : 
Jean-Luc Parouty (CNRS/SIMaP) -->\n", "\n", @@ -342,7 +342,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST_Lightning/01-DNN-MNIST_Lightning.ipynb b/MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb similarity index 98% rename from MNIST_Lightning/01-DNN-MNIST_Lightning.ipynb rename to MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb index ad89bab..c58aaf4 100644 --- a/MNIST_Lightning/01-DNN-MNIST_Lightning.ipynb +++ b/MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb @@ -8,9 +8,9 @@ }, "source": [ "\n", - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [MNIST2] - Simple classification with DNN using pytorch lightning\n", + "# <!-- TITLE --> [LMNIST1] - Simple classification with DNN using Pytorch Lightning\n", "<!-- DESC --> An example of classification using a dense neural network for the famous MNIST dataset\n", "<!-- AUTHOR : MBOGOL Touye Achille (AI/ML Engineer EFELIA-MIAI/SIMAP Lab) -->\n", "\n", @@ -534,7 +534,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST_Lightning/02-CNN-MNIST_Lightning.ipynb b/MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb similarity index 98% rename from MNIST_Lightning/02-CNN-MNIST_Lightning.ipynb rename to MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb index 11fff32..f1559c5 100644 --- a/MNIST_Lightning/02-CNN-MNIST_Lightning.ipynb +++ b/MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb @@ -6,9 +6,9 @@ "metadata": {}, "source": [ "\n", - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "## <!-- TITLE --> [MNIST2] - Simple classification with CNN using lightning\n", + "## <!-- TITLE --> [LMNIST2] - Simple classification with CNN using Pytorch Lightning\n", "<!-- DESC --> An example of classification using a convolutional neural network for the famous MNIST dataset\n", "<!-- AUTHOR : MBOGOL Touye Achille (AI/ML Engineer MIAI/SIMaP) -->\n", "\n", @@ -539,7 +539,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST_PyTorch/01-DNN-MNIST_PyTorch.ipynb b/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb similarity index 98% rename from MNIST_PyTorch/01-DNN-MNIST_PyTorch.ipynb rename to MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb index 412ff83..94e1c34 100644 --- a/MNIST_PyTorch/01-DNN-MNIST_PyTorch.ipynb +++ b/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [PMNIST1] - Simple classification with DNN\n", "<!-- DESC -->Example of classification with a fully connected neural network, using Pytorch\n", @@ -54,7 +54,7 @@ "import sys,os\n", "\n", "import fidle\n", - "from fidle_pwk_additional import convergence_history_CrossEntropyLoss\n", + "from modules.fidle_pwk_additional import convergence_history_CrossEntropyLoss\n", "\n", "# Init Fidle environment\n", "run_id, run_dir, datasets_dir 
= fidle.init('PMNIST1')" @@ -377,7 +377,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/MNIST_PyTorch/fidle_pwk_additional.py b/MNIST.PyTorch/modules/fidle_pwk_additional.py similarity index 100% rename from MNIST_PyTorch/fidle_pwk_additional.py rename to MNIST.PyTorch/modules/fidle_pwk_additional.py diff --git a/MNIST_Lightning/.gitkeep b/MNIST_Lightning/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/MNIST_PyTorch/.gitkeep b/MNIST_PyTorch/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/Misc/Numpy.ipynb b/Misc/00-Numpy.ipynb similarity index 98% rename from Misc/Numpy.ipynb rename to Misc/00-Numpy.ipynb index 8add3d3..173ec13 100644 --- a/Misc/Numpy.ipynb +++ b/Misc/00-Numpy.ipynb @@ -8,7 +8,7 @@ } }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [NP1] - A short introduction to Numpy\n", "<!-- DESC --> Numpy is an essential tool for the Scientific Python.\n", @@ -483,7 +483,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Misc/Activation-Functions.ipynb b/Misc/01-Activation-Functions.ipynb similarity index 97% rename from Misc/Activation-Functions.ipynb rename to Misc/01-Activation-Functions.ipynb index ab20eae..4c0dda4 100644 --- a/Misc/Activation-Functions.ipynb +++ b/Misc/01-Activation-Functions.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [ACTF1] - Activation functions\n", "<!-- DESC --> Some activation functions, with their derivatives.\n", @@ -210,7 +210,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Misc/Using-pandas.ipynb b/Misc/02-Using-pandas.ipynb similarity index 95% rename from Misc/Using-pandas.ipynb rename to Misc/02-Using-pandas.ipynb index bc314cf..d8213e5 100644 --- a/Misc/Using-pandas.ipynb +++ b/Misc/02-Using-pandas.ipynb @@ -8,7 +8,7 @@ } }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [PANDAS1] - Quelques exemples avec Pandas\n", "<!-- DESC --> pandas is another essential tool for the Scientific Python.\n", @@ -114,7 +114,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Misc/03-Using-Pytorch.ipynb b/Misc/03-Using-Pytorch.ipynb new file mode 100644 index 0000000..d13701c --- /dev/null +++ b/Misc/03-Using-Pytorch.ipynb @@ -0,0 +1,2753 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "51be1de8", + "metadata": {}, + "source": [ + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", + "\n", + "# <!-- TITLE --> [PYTORCH1] - Practical Lab : PyTorch\n", + "<!-- DESC --> PyTorch est l'un des principaux framework utilisé dans le Deep 
Learning\n", + "<!-- AUTHOR : Kamel Guerda (CNRS/IDRIS) -->\n", + "\n", + "## Objectives :\n", + " - Understand PyTorch" + ] + }, + { + "cell_type": "markdown", + "id": "1959d3d5-388e-4c43-8318-342f08e6b024", + "metadata": { + "tags": [] + }, + "source": [ + "## **Introduction**" + ] + }, + { + "cell_type": "markdown", + "id": "a6da1305-551a-4549-abed-641415823a33", + "metadata": {}, + "source": [ + "**PyTorch** is an open-source machine learning library developed by Facebook's AI Research lab. It offers an imperative and dynamic computational model, making it particularly easy and intuitive for researchers. Its primary feature is the tensor, a multi-dimensional array similar to NumPy's ndarray, but with GPU acceleration." + ] + }, + { + "cell_type": "markdown", + "id": "54c79dfb-a061-4b72-afe3-c97c28071e5c", + "metadata": { + "tags": [] + }, + "source": [ + "### **Installation and usage**" + ] + }, + { + "cell_type": "markdown", + "id": "20852981-c289-4c4e-8099-2c5efef58e3b", + "metadata": {}, + "source": [ + "Whether you're working on the supercomputer Jean Zay or your own machine, getting your environment ready is the first step. Here's how to proceed:" + ] + }, + { + "cell_type": "markdown", + "id": "a88f32bd-37f6-4e99-97e0-62283a146a1f", + "metadata": { + "tags": [] + }, + "source": [ + "#### **On Jean Zay**" + ] + }, + { + "cell_type": "markdown", + "id": "8421a9f0-130d-40ef-8a7a-066bf9147066", + "metadata": {}, + "source": [ + "For those accessing the Jean Zay supercomputer (you should already be at step 3):\n", + "\n", + "1. **Access JupyterHub**: Go to [https://jupyterhub.idris.fr](https://jupyterhub.idris.fr). The login credentials are the same as those used to access the Jean Zay machine. Ensure your IP address is whitelisted (add a new IP via the account management form if needed).\n", + "2. **Create a JupyterLab Instance**: Choose to create the instance either on a frontend node (e.g., for internet access) or on a compute node by reserving resources via Slurm. Select the appropriate options such as workspace, allocated resources, billing, etc.\n", + "3. **Choose the Kernel**: IDRIS provides kernels based on modules installed on Jean Zay. This includes various versions of Python, Tensorflow, and PyTorch. Create a new notebook with the desired kernel through the launcher or change the kernel on an existing notebook by clicking the kernel name at the top right of the screen.\n", + "4. For advanced features like Tensorboard, MLFlow, custom kernel creation, etc., refer to the [JupyterHub technical documentation](https://jupyterhub.idris.fr/services/documentation/).\n" + ] + }, + { + "cell_type": "markdown", + "id": "a168594c-cf18-4ed8-babf-242b56b3e0b7", + "metadata": { + "tags": [] + }, + "source": [ + "> **Task:** Verifying Your Kernel in the upper top corner\n", + "> - In JupyterLab, at the top right of your notebook, you should see the name of your current kernel.\n", + "> - Ensure it matches \"PyTorch 2.0\" or a similar name indicating the PyTorch version.\n", + "> - If it doesn't, click on the kernel name and select the appropriate kernel from the list.\n" + ] + }, + { + "cell_type": "markdown", + "id": "0aaadeee-5115-48d0-aa57-20a0a63d5054", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Elsewhere**" + ] + }, + { + "cell_type": "markdown", + "id": "5d34951e-1b7b-4776-9449-eff57a9385f4", + "metadata": {}, + "source": [ + "\n", + "For users on other platforms:\n", + "\n", + "1. 
Install PyTorch by following the official [installation guide](https://pytorch.org/get-started/locally/).\n", + "2. If you have a GPU, ensure you've installed the necessary CUDA toolkit and cuDNN libraries.\n", + "3. Launch your preferred Python environment, whether it's Jupyter notebook, an IDE like PyCharm, or just the terminal.\n", + "\n", + "Once your setup is complete, you're ready to dive in. Let's explore the fascinating world of deep learning!" + ] + }, + { + "cell_type": "markdown", + "id": "7552d5ac-eb8c-48e0-9e61-3b056d560f7b", + "metadata": { + "tags": [] + }, + "source": [ + "### **Version**" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "272e492f-35c5-4293-b504-8e8632da1b73", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Importing PyTorch\n", + "import torch\n", + "\n", + "# TODO: Print the version of PyTorch being used\n" + ] + }, + { + "cell_type": "markdown", + "id": "9fdbe225-4e06-4ad0-abca-4325457dc0e1", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "To print the version of PyTorch you're using, you can access the <code>__version__</code> attribute of the <code>torch</code> module. \n", + " \n", + "```python\n", + "print(torch.__version__)\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "72752068-02fe-4e44-8c27-40e8f66680c9", + "metadata": { + "tags": [] + }, + "source": [ + "**Why PyTorch 2.0 is a Game-Changer**\n", + "\n", + "PyTorch 2.0 represents a major step in the evolution of this popular deep learning library. As part of the transition to the 2-series, let's highlight some reasons why this version is pivotal:\n", + "\n", + "1. **Performance**: With PyTorch 2.0, performance has been supercharged at the compiler level, offering faster execution and support for Dynamic Shapes and Distributed systems.\n", + " \n", + "2. **torch.compile**: This introduces a more Pythonic approach, moving some parts of PyTorch from C++ back to Python. Notably, across a test set of 163 open-source models, the use of `torch.compile` resulted in a 43% speed increase during training on an NVIDIA A100 GPU.\n", + "\n", + "3. **Innovative Technologies**: Technologies like TorchDynamo and TorchInductor, both written in Python, make PyTorch more flexible and developer-friendly.\n", + " \n", + "4. **Staying Pythonic**: PyTorch 2.0 emphasizes Python-centric development, reducing barriers for developers and vendors.\n", + "\n", + "As we progress in this lab, we'll dive deeper into some of these features, giving you hands-on experience with the power and flexibility of PyTorch 2.0.\n" + ] + }, + { + "cell_type": "markdown", + "id": "bc215c02-1f16-48be-88f9-5080fd2be9ed", + "metadata": { + "tags": [] + }, + "source": [ + "## **Pytorch Fundamentals**" + ] + }, + { + "cell_type": "markdown", + "id": "bcd7f0fc-a714-495e-9307-e48964abd85b", + "metadata": { + "tags": [] + }, + "source": [ + "### **Tensors**" + ] + }, + { + "cell_type": "markdown", + "id": "6e185bf6-3d3c-4a43-b425-e6aa3da5d5dd", + "metadata": { + "tags": [] + }, + "source": [ + "A **tensor** is a generalization of vectors and matrices and is easily understood as a multi-dimensional array. In the context of PyTorch:\n", + "- A 0-dimensional tensor is a scalar (a single number).\n", + "- A 1-dimensional tensor is a vector.\n", + "- A 2-dimensional tensor is a matrix.\n", + "- ... 
and so on for higher dimensions.\n", + "\n", + "Tensors are fundamental to PyTorch not just as data containers but also for their compatibility with GPU acceleration, making operations on them extremely fast. This acceleration is vital for training large neural networks.\n", + "\n", + "Let's start our journey with tensors by examining how PyTorch handles scalars." + ] + }, + { + "cell_type": "markdown", + "id": "fa90e399-3955-4417-a4a3-c0c812ebb1d9", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Scalars in PyTorch**\n", + "\n", + "### Scalars in PyTorch\n", + "\n", + "A scalar, being a 0-dimensional tensor, is simply a single number. While it might seem trivial, understanding scalars in PyTorch lays the foundation for grasping more complex tensor structures. Familiarize yourself with the `torch.tensor()` function from the [official documentation](https://pytorch.org/docs/stable/generated/torch.tensor.html) before proceeding.\n", + "\n", + "> **Task**: Create a scalar tensor in PyTorch and examine its properties.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b6db1841-0fab-4df0-b699-058d5a477ca6", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "ename": "SyntaxError", + "evalue": "invalid syntax (2309926818.py, line 2)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m Cell \u001b[0;32mIn[2], line 2\u001b[0;36m\u001b[0m\n\u001b[0;31m scalar_tensor = # Your code here\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" + ] + } + ], + "source": [ + "# TODO: Create a scalar tensor with the value 7.5\n", + "scalar_tensor = # Your code here\n", + "\n", + "# Print the scalar tensor\n", + "print(\"Scalar Tensor:\", scalar_tensor)\n", + "\n", + "# TODO: Print its dimension, shape, and type\n" + ] + }, + { + "cell_type": "markdown", + "id": "c9bc265c-9a7f-4588-8586-562b390d63d9", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "To create a scalar tensor, use the <code>torch.tensor()</code> function. To retrieve its dimension, shape, and type, you can use the <code>.dim()</code>, <code>.shape</code>, and <code>.dtype</code> attributes respectively. \n", + "\n", + "Here's how you can achieve that:\n", + "\n", + "```python\n", + "scalar_tensor = torch.tensor(7.5)\n", + "print(\"Scalar Tensor:\", scalar_tensor)\n", + "print(\"Dimension:\", scalar_tensor.dim())\n", + "print(\"Shape:\", scalar_tensor.shape)\n", + "print(\"Type:\", scalar_tensor.dtype)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "fc240c26-5866-4080-bbb9-d5cde1500300", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Vectors in PyTorch**\n", + "\n", + "A vector in PyTorch is a 1-dimensional tensor. It's essentially a list of numbers that can represent anything from a sequence of data points to the weights of a neural network layer.\n", + "\n", + "In this section, we'll see how to create and manipulate vectors using PyTorch. 
We'll also look at some basic operations you can perform on them.\n", + "\n", + "> **Task**: Create a 1-dimensional tensor (vector) with values `[1.5, 2.3, 3.1, 4.8, 5.2]` and print its dimension, shape, and type.\n", + "\n", + "Start by referring to the `torch.tensor()` function in the [official documentation](https://pytorch.org/docs/stable/generated/torch.tensor.html) to understand how to create tensors of varying dimensions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e9503b49-38d1-45d9-910f-761da82cfbd0", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "ename": "SyntaxError", + "evalue": "invalid syntax (138343520.py, line 2)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m Cell \u001b[0;32mIn[3], line 2\u001b[0;36m\u001b[0m\n\u001b[0;31m vector_tensor = # Your code here\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" + ] + } + ], + "source": [ + "# TODO: Create a 1-dimensional tensor (vector) with values [1.5, 2.3, 3.1, 4.8, 5.2]\n", + "vector_tensor = # Your code here\n", + "\n", + "# Print the vector tensor\n", + "print(\"Vector Tensor:\", vector_tensor)\n", + "\n", + "# TODO: Print its dimension, shape, and type\n" + ] + }, + { + "cell_type": "markdown", + "id": "13252d1f-004f-42e0-aec9-56322b43ab72", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "Creating a 1-dimensional tensor is similar to creating a scalar. Instead of a single number, you pass a list of numbers to the <code>torch.tensor()</code> function. The <code>.dim()</code>, <code>.shape</code>, and <code>.dtype</code> attributes will help you retrieve its properties.\n", + "\n", + "```python\n", + "vector_tensor = torch.tensor([1.5, 2.3, 3.1, 4.8, 5.2])\n", + "print(\"Vector Tensor:\", vector_tensor)\n", + "print(\"Dimension:\", vector_tensor.dim())\n", + "print(\"Shape:\", vector_tensor.shape)\n", + "print(\"Type:\", vector_tensor.dtype)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "7bfc47a8-e99d-4683-ac36-287f35a76fd0", + "metadata": {}, + "source": [ + "#### **Vector Operations**\n", + "\n", + "Vectors are not just static entities; we often perform various operations on them, especially in the context of neural networks. This includes addition, subtraction, scalar multiplication, dot products, etc.\n", + "\n", + "> **Task**: Using the previously defined `vector_tensor`, perform the following operations:\n", + "1. Add 5 to all the elements of the vector.\n", + "2. Multiply all the elements of the vector by 2.\n", + "3. Compute the dot product of the vector with itself." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "86182e1c-5491-4743-a7c8-10b9effd8194", + "metadata": { + "tags": [] + }, + "outputs": [ + { + "ename": "SyntaxError", + "evalue": "invalid syntax (184231995.py, line 2)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m Cell \u001b[0;32mIn[4], line 2\u001b[0;36m\u001b[0m\n\u001b[0;31m vector_added = # Your code here\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" + ] + } + ], + "source": [ + "# TODO: Add 5 to all elements\n", + "vector_added = # Your code here\n", + "\n", + "# TODO: Multiply all elements by 2\n", + "vector_multiplied = # Your code here\n", + "\n", + "# TODO: Compute the dot product with itself\n", + "dot_product = # Your code here\n", + "\n", + "# Print the results\n", + "print(\"Vector after addition:\", vector_added)\n", + "print(\"Vector after multiplication:\", vector_multiplied)\n", + "print(\"Dot Product:\", dot_product)" + ] + }, + { + "cell_type": "markdown", + "id": "75773a02-3ab4-4325-99fb-7a742e997f21", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "PyTorch tensors support regular arithmetic operations. For the dot product, you can use the <code>torch.dot()</code> function.\n", + "\n", + "```python\n", + "\n", + "vector_added = vector_tensor + 5\n", + "vector_multiplied = vector_tensor * 2\n", + "dot_product = torch.dot(vector_tensor, vector_tensor)\n", + "\n", + "print(\"Vector after addition:\", vector_added)\n", + "print(\"Vector after multiplication:\", vector_multiplied)\n", + "print(\"Dot Product:\", dot_product)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "2b4766ba-ef9a-4f24-ba43-7358097a7b61", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Matrices in PyTorch**\n", + "\n", + "A matrix in PyTorch is represented as a 2D tensor. Just as vectors are generalizations of scalars, matrices are generalizations of vectors, providing an additional dimension. Matrices are crucial for a range of operations in deep learning, including representing datasets, transformations, and more.\n" + ] + }, + { + "cell_type": "markdown", + "id": "2ec7544d-ef87-4773-88d8-cee731d1c43c", + "metadata": { + "tags": [] + }, + "source": [ + "##### **Creating Matrices**\n", + "\n", + "Before diving into manual matrix creation, it's beneficial to know some utility functions PyTorch provides:\n", + "\n", + "- `torch.rand()`: Generates a matrix with random values between 0 and 1.\n", + "- `torch.eye()`: Creates an identity matrix.\n", + "- `torch.zeros()`: Generates a matrix filled with zeros.\n", + "- `torch.ones()`: Generates a matrix filled with ones.\n", + "\n", + "You can explore more about these functions in the [official documentation](https://pytorch.org/docs/stable/tensors.html).\n", + "\n", + "> **Task**: Using the above functions, create the following matrices:\n", + "> 1. A 3x3 matrix with random values.\n", + "> 2. A 5x5 identity matrix.\n", + "> 3. A 2x4 matrix filled with zeros.\n", + "> 4. 
A 4x2 matrix filled with ones.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5014b564-6bf5-4f00-a513-578ca72d94a8", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code for creating the matrices goes here\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "86b2708c-45c6-4b2c-b526-41491fcafa08", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To create these matrices, make use of the following functions:\n", + "\n", + "1. `torch.rand(size)`: Use this function and specify the size as `(3, 3)` to create a 3x3 matrix with random values.\n", + "2. `torch.eye(n, m)`: Use this to generate an identity matrix. For a square matrix like 5x5, n and m would both be 5.\n", + "3. `torch.zeros(m, n)`: For a 2x4 matrix filled with zeros, specify m=2 and n=4.\n", + "4. `torch.ones(m, n)`: Similar to the `zeros` function but fills the matrix with ones.\n", + "\n", + "```python\n", + "# 1. 3x3 matrix with random values\n", + "random_matrix = torch.rand(3, 3)\n", + "print(random_matrix)\n", + "\n", + "# 2. 5x5 identity matrix\n", + "identity_matrix = torch.eye(5, 5)\n", + "print(identity_matrix)\n", + "\n", + "# 3. 2x4 matrix filled with zeros\n", + "zero_matrix = torch.zeros(2, 4)\n", + "print(zero_matrix)\n", + "\n", + "# 4. 4x2 matrix filled with ones\n", + "one_matrix = torch.ones(4, 2)\n", + "print(one_matrix)\n", + "```\n", + "</details>\n" + ] + }, + { + "cell_type": "markdown", + "id": "60ff5e51-699e-46a1-8cc7-1d5fc9a4d078", + "metadata": {}, + "source": [ + "#### **Matrix Operations in PyTorch**\n", + "\n", + "Just like vectors, matrices can undergo a variety of operations. Some of the basic ones include matrix addition, subtraction, and multiplication. More advanced operations include matrix inversion, transposition, and determinant calculation.\n" + ] + }, + { + "cell_type": "markdown", + "id": "c6bdb9d9-b299-4d63-b92f-7c4b8c32a1b7", + "metadata": { + "tags": [] + }, + "source": [ + "##### **Basic Matrix Operations**\n", + "\n", + "> **Task**: Perform the following operations on matrices:\n", + "> 1. Create two 3x3 matrices with random values.\n", + "> 2. Add the two matrices.\n", + "> 3. Subtract the second matrix from the first one.\n", + "> 4. Multiply the two matrices element-wise.\n", + "\n", + "Remember, for matrix multiplication that results in the dot product, you'd use `torch.mm` or `@`, but for element-wise multiplication, you use `*`.\n", + "\n", + "Here's the [official documentation](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.matmul) on matrix operations for your reference.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6be8c647-c455-4d3b-8a21-c4b7102ffa75", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code for creating the matrices and performing the operations goes here" + ] + }, + { + "cell_type": "markdown", + "id": "0020b26b-b2bb-4efa-9bf3-3f037acd050e", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Here's how you can perform the given matrix operations:\n", + "\n", + "```python\n", + "# 1. Create two 3x3 matrices with random values\n", + "matrix1 = torch.rand(3, 3)\n", + "matrix2 = torch.rand(3, 3)\n", + "print(\"Matrix 1:\\n\", matrix1)\n", + "print(\"\\nMatrix 2:\\n\", matrix2)\n", + "\n", + "# 2. 
Add the two matrices\n", + "sum_matrix = matrix1 + matrix2\n", + "print(\"\\nSum of matrices:\\n\", sum_matrix)\n", + "\n", + "# 3. Subtract the second matrix from the first one\n", + "difference_matrix = matrix1 - matrix2\n", + "print(\"\\nDifference of matrices:\\n\", difference_matrix)\n", + "\n", + "# 4. Multiply the two matrices element-wise\n", + "product_matrix = matrix1 * matrix2\n", + "print(\"\\nElement-wise product of matrices:\\n\", product_matrix)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "07f57464-76e2-4670-8332-3fcec2e162bd", + "metadata": {}, + "source": [ + "#### **Higher-Dimensional Tensors in PyTorch**\n", + "\n", + "While scalars, vectors, and matrices cover 0D, 1D, and 2D tensors respectively, in deep learning, especially in tasks like image processing, you often encounter tensors with more than two dimensions.\n", + "\n", + "For instance, a colored image is often represented as a 3D tensor: height x width x channels (e.g., RGB channels). A batch of such images would then be a 4D tensor: batch_size x height x width x channels.\n", + "\n", + "Let's get our hands dirty with some higher-dimensional tensors!\n" + ] + }, + { + "cell_type": "markdown", + "id": "3dd1fea7-d290-49fe-ac1f-5a8387e3d386", + "metadata": { + "tags": [] + }, + "source": [ + "##### **Creating a 4D Tensor**\n", + "\n", + "> **Task**: Create a 4D tensor representing a batch of 2 images of size 4x4 with 3 channels (like RGB) filled with random values.\n", + "\n", + "Use the `torch.rand` function, and remember to specify the dimensions correctly.\n", + "\n", + "Here's the [official documentation](https://pytorch.org/docs/stable/tensors.html#creation-ops) for tensor creation.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e7c8ac6e-f870-4b5d-ac2c-05be1d0cc9f1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code for creating the 4D tensor goes here" + ] + }, + { + "cell_type": "markdown", + "id": "efe61750-a91f-428a-b4e2-7df0cc2a782b", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Creating a 4D tensor with the given specifications can be achieved using the `torch.rand` function. Here's how:\n", + "\n", + "```python\n", + "# Create a 4D tensor representing a batch of 2 images of size 4x4 with 3 channels\n", + "image_tensor = torch.rand(2, 4, 4, 3)\n", + "print(image_tensor)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "8cfbcaa0-a0f6-4869-ba94-65d4439a60ca", + "metadata": {}, + "source": [ + "#### **Reshaping Tensors**\n", + "\n", + "In deep learning, we often need to reshape our tensors. For instance, an image represented as a 3D tensor might need to be reshaped into a 1D tensor before passing it through a fully connected layer. PyTorch provides methods to make this easy.\n", + "\n", + "The most commonly used method for reshaping tensors in PyTorch is the `view()` method. Another method that offers more flexibility (especially when you're unsure about the size of one dimension) is `reshape()`.\n", + "\n", + ">[Task]: Using the official documentation, find out how to use the [`view()`](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.view) and [`reshape()`](https://pytorch.org/docs/stable/tensors.html#torch.Tensor.reshape) methods. 
Create a 2x3 tensor using `torch.tensor()` and then reshape it into a 3x2 tensor.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6758ba7-aa35-42f0-87c1-86b88de64238", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Create a 2x3 tensor\n", + "\n", + "# Reshape it into a 3x2 tensor\n" + ] + }, + { + "cell_type": "markdown", + "id": "fea31255-c2fe-47b2-b03b-c2b35953e05a", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "To reshape a tensor using <code>view()</code> method:\n", + "\n", + "```python\n", + "tensor = torch.tensor([[1, 2, 3], [4, 5, 6]])\n", + "reshaped_tensor = tensor.view(3, 2)\n", + "```\n", + "<br>\n", + "Alternatively, using the <code>reshape()</code> method:\n", + "\n", + "```python\n", + "reshaped_tensor = tensor.reshape(3, 2)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "c580dbca-b75a-4b97-a24a-6a19c7cdf8d1", + "metadata": {}, + "source": [ + "#### **Broadcasting**\n", + "\n", + "Broadcasting is a powerful feature in PyTorch that allows you to perform operations between tensors of different shapes. When possible, PyTorch will automatically reshape the tensors in a way that makes the operation valid. This can significantly reduce manual reshaping and is efficient in memory usage.\n", + "\n", + "However, it's essential to understand the rules and nuances of broadcasting to use it effectively and avoid unexpected behaviors.\n", + "\n", + ">[Task]: Given a tensor `A` of shape (4, 1) and another tensor `B` of shape (1, 4), use PyTorch operations to produce a result tensor of shape (4, 4). Check the [official documentation on broadcasting](https://pytorch.org/docs/stable/notes/broadcasting.html) for guidance.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "44566fb7-87ed-41ef-a86e-db32a1cf2179", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Define tensor A of shape (4, 1) and tensor B of shape (1, 4)\n", + "\n", + "# Perform an operation to get a result tensor of shape (4, 4)\n" + ] + }, + { + "cell_type": "markdown", + "id": "2602f2c4-f507-4a9a-8e8d-dee5e95efc61", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "You can simply use addition, subtraction, multiplication, or any other element-wise operation. When you do this operation, PyTorch will automatically broadcast the tensors to a compatible shape. For example:\n", + "\n", + "```python\n", + "A = torch.tensor([[1], [2], [3], [4]])\n", + "B = torch.tensor([[1, 2, 3, 4]])\n", + "result = A * B\n", + "print(result)\n", + "```\n", + "</details>\n" + ] + }, + { + "cell_type": "markdown", + "id": "ba2cc439-8ecc-4d92-b78f-39ef762678f8", + "metadata": { + "tags": [] + }, + "source": [ + "### **GPU Support with CUDA**" + ] + }, + { + "cell_type": "markdown", + "id": "575536c5-87a7-4781-8557-558627f14c0a", + "metadata": { + "tags": [] + }, + "source": [ + "PyTorch seamlessly supports operations on Graphics Processing Units (GPUs) through CUDA, an API developed by NVIDIA for their GPUs. If you have a compatible NVIDIA GPU on your machine, PyTorch can utilize it to speed up tensor operations which can be orders of magnitude faster than on a CPU.\n", + "\n", + "To verify if your PyTorch installation can use CUDA, you can check the attribute `torch.cuda.is_available()`. 
This returns `True` if CUDA is available and PyTorch can use GPUs, otherwise it returns `False`.\n", + "\n", + ">[Task]: Print whether CUDA support is available on your system. The [CUDA documentation](https://pytorch.org/docs/stable/cuda.html) might be useful for this task." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38e84bb7-5026-4262-8b78-b368c55a1450", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Check and print if CUDA is available\n", + "cuda_available = None # Replace None with the appropriate code\n", + "print(\"CUDA available:\", cuda_available)" + ] + }, + { + "cell_type": "markdown", + "id": "646b5660-5131-4ce0-9592-0fd14608c6df", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To check if CUDA is available, you can utilize the torch.cuda.is_available() function.\n", + "```python\n", + "cuda_available = torch.cuda.is_available()\n", + "print(\"CUDA available:\", cuda_available)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "86c8d7ed-0931-4874-bb27-e796ae1a1d7a", + "metadata": {}, + "source": [ + "When developing deep learning models in PyTorch, it's a good habit to write device-agnostic code. This means your code can automatically use a GPU if available, or fall back to using the CPU if not. The `torch.device` object allows you to specify the device (either CPU or GPU) where you'd like your tensors to be allocated.\n", + "\n", + "To dynamically determine the device, a common pattern is to check `torch.cuda.is_available()`, and set the device accordingly. This is particularly useful when you want your code to be flexible, regardless of the underlying hardware.\n", + "\n", + ">[Task]: Define a `device` variable that is set to 'cuda:0' if CUDA is available and 'cpu' otherwise. Create a tensor on this device. The [documentation about torch.device](https://pytorch.org/docs/stable/tensor_attributes.html#torch-device) might be handy." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91e05e75-03ad-44cb-9842-89e2017ee709", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Define the device\n", + "device = None # Replace None with the appropriate code\n", + "\n", + "# Create a tensor on the specified device\n", + "tensor_on_device = torch.tensor([1, 2, 3, 4, 5], device=device)" + ] + }, + { + "cell_type": "markdown", + "id": "3b80406b-b1cc-4831-a6ba-8e6385703755", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To define the device variable dynamically:\n", + "\n", + "```python\n", + "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n", + "```\n", + "<br>\n", + "After setting the device, you can create tensors on it directly using the device argument.\n", + "\n", + "</details>\n" + ] + }, + { + "cell_type": "markdown", + "id": "574a2192-cc09-4d2c-8f01-97b051b7ffc8", + "metadata": { + "tags": [] + }, + "source": [ + "### **Automatic Differentiation with Autograd**" + ] + }, + { + "cell_type": "markdown", + "id": "7f5406f6-e295-4f70-a815-9eef18352390", + "metadata": { + "tags": [] + }, + "source": [ + "PyTorch's `autograd` module provides the tools for automatically computing the gradients for tensors. 
This feature is a cornerstone for neural network training, as gradients are essential for optimization algorithms like gradient descent.\n", + "\n", + "When we create a tensor, `requires_grad` is set to `False` by default, meaning it won't track operations. However, if we set `requires_grad=True`, PyTorch will start to track all operations on the tensor.\n", + "\n", + "Let's start with a simple example:\n", + "\n", + ">**Task:** Create a tensor that holds a single value, let's say 2, and set `requires_grad=True`. Then, define a simple operation like squaring the tensor. Finally, inspect the resulting tensor. The [documentation for requires_grad](https://pytorch.org/docs/stable/autograd.html#torch.Tensor.requires_grad) might be handy." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe63ab93-55be-434d-822f-8fd9cd727941", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# TODO: Create a tensor, perform a simple operation, and print its data and grad_fn separately.\n" + ] + }, + { + "cell_type": "markdown", + "id": "fa7ee20c-c2d6-4dcf-bb37-9eda580b5dc5", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To create a tensor with requires_grad=True and square it:\n", + "\n", + "```python\n", + "# TODO: Create a tensor, perform a simple operation, and print its data and grad_fn separately.\n", + "x = torch.tensor([2.0], requires_grad=True)\n", + "y = x ** 2\n", + "print(\"Data:\", y.data)\n", + "print(\"grad_fn:\", y.grad_fn)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "c14dde16-a6be-4151-94cb-96ae98f0648a", + "metadata": {}, + "source": [ + "Once the operation is executed on a tensor, a new attribute grad_fn is created. This attribute references a function that has created the tensor. In our example, since we squared the tensor, grad_fn will be of type PowBackward0.\n", + "\n", + "This grad_fn attribute provides a link to the computational history of the tensor, allowing PyTorch to backpropagate errors and compute gradients when training neural networks." + ] + }, + { + "cell_type": "markdown", + "id": "0965e79e-558a-45a9-8ab2-614c503e59c0", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Computing Gradients**" + ] + }, + { + "cell_type": "markdown", + "id": "36fb6c5b-9b39-4a2f-a767-61032b1b4ffc", + "metadata": {}, + "source": [ + "Now, let's compute the gradients of `y` with respect to `x`. To do this, we'll call the `backward()` method on the tensor `y`.\n", + "\n", + ">[Task]: Compute the gradients of `y` by calling the `backward()` method on it. Afterwards, print the gradients of `x`. The [documentation for backward()](https://pytorch.org/docs/stable/autograd.html#torch.autograd.backward) may be useful.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "83685760-bde9-4327-88f7-cfe02bdb3309", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# TODO: Compute the gradient and print it." 
+ ] + }, + { + "cell_type": "markdown", + "id": "9b1d104b-efef-4fff-869d-8dde1131868e", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To compute the gradient:\n", + "\n", + "```python\n", + "y.backward()\n", + "print(x.grad)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "d7f5aecb-8623-481f-a5cf-f8b6dd0c9a37", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Gradient Accumulation**" + ] + }, + { + "cell_type": "markdown", + "id": "1a4df0a1-12a0-4129-a258-915fa8440193", + "metadata": {}, + "source": [ + "In PyTorch, the gradients of tensors are accumulated into the `.grad` attribute each time you call `.backward()`. This means that if you call `.backward()` multiple times, the gradients will add up.\n", + "\n", + "However, by default, calling `.backward()` consumes the computational graph to save memory. If you intend to call `.backward()` multiple times on the same graph, you need to specify `retain_graph=True` during all but the last call.\n", + "\n", + ">[Task]: Create a tensor, perform an operation on it, and then call `backward()` twice. Use `retain_graph=True` in the first call to retain the computational graph. Observe the `.grad` attribute after each call.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "50a04095-9d7e-48ba-90ed-06718cd379f0", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Create a tensor\n", + "w = torch.tensor([1.0], requires_grad=True)\n", + "\n", + "# Operation\n", + "result = w * 2\n", + "\n", + "# TODO: Call backward twice (using retain_graph=True for the first call) and print the grad after each call\n", + "# ...\n" + ] + }, + { + "cell_type": "markdown", + "id": "d699e58d-d479-466a-b592-cbf68d185c3b", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "result.backward(retain_graph=True)\n", + "print(w.grad) # This should print 2\n", + "\n", + "result.backward()\n", + "print(w.grad) # This should print 4, as gradients get accumulated\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "88d30f87-2469-4289-ad8a-51a25a2e8b82", + "metadata": {}, + "source": [ + "#### **Zeroing Gradients**\n" + ] + }, + { + "cell_type": "markdown", + "id": "2ea93580-9a35-4f5d-8f29-0a324d28d28a", + "metadata": { + "tags": [] + }, + "source": [ + "\n", + "In neural network training, we typically want to update our weights with the gradients after each forward and backward pass. This means that we don't want the gradients to accumulate across multiple passes. 
Hence, it's common to zero out the gradients at the start of a new iteration.\n", + "\n", + ">[Task]: Using the tensor from the previous cell, zero out its gradients and verify that it has been set to zero.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9cb03a91-d1df-4bbf-a0d2-b5580c643e12", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# TODO: Zero out the gradients of w and print" + ] + }, + { + "cell_type": "markdown", + "id": "4a89ff66-b1ef-413a-a41c-847e8c832e4b", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "\n", + "w.grad.zero_()\n", + "print(w.grad)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "85f75515-3d89-4249-b00a-03c13cca92d4", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Non-Scalar Backward**" + ] + }, + { + "cell_type": "markdown", + "id": "86a54a2c-e8c1-4278-a3fe-ed60564ebd07", + "metadata": { + "tags": [] + }, + "source": [ + "When dealing with non-scalar tensors, `backward` requires an additional argument: the gradient of the tensor with respect to some scalar (usually a loss). \n", + "\n", + ">[Task]: Create a tensor of shape (2, 2) with `requires_grad=True`. Compute a non-scalar result by multiplying the tensor with itself. Then, compute backward with a gradient argument. You can consult the [backward documentation](https://pytorch.org/docs/stable/autograd.html#torch.autograd.backward) for reference." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cc0e4271-c356-4a4e-9a3a-5df1403a4211", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# TODO: Create a tensor, perform an operation, and compute backward with a gradient argument" + ] + }, + { + "cell_type": "markdown", + "id": "e7ee72f3-f51c-4849-b41d-136028029185", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "\n", + "v = torch.tensor([[2.0, 3.0], [4.0, 5.0]], requires_grad=True)\n", + "result = v * v\n", + "\n", + "grads = torch.tensor([[1.0, 1.0], [1.0, 1.0]])\n", + "result.backward(grads)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "2e403021-4854-4e97-9898-82ed355293e7", + "metadata": { + "tags": [] + }, + "source": [ + "#### **Stopping Gradient Tracking**\n" + ] + }, + { + "cell_type": "markdown", + "id": "ba644253-8523-480d-8318-a87047671a21", + "metadata": { + "tags": [] + }, + "source": [ + "\n", + "There are scenarios where we don't want to track the gradients for certain operations. This can be achieved in two main ways:\n", + "\n", + "1. **Using `torch.no_grad()`**: This context manager ensures that the enclosed operations are excluded from gradient tracking.\n", + "2. **Using `.detach()`**: Creates a tensor that shares the same storage but does not require gradients.\n", + "\n", + ">[Task]: Create a tensor with `requires_grad=True`. 
Then, demonstrate both methods above to prevent gradient computation.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1feb2f9b-0c5f-4e9d-b042-e74052bc83a9", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# TODO: Demonstrate operations without gradient tracking\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "a5eff82b-bfbd-4be7-afa3-dc00f5341568", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "\n", + "# Using torch.no_grad()\n", + "with torch.no_grad():\n", + " result_no_grad = v * v\n", + "print(result_no_grad.requires_grad)\n", + "\n", + "# Using .detach()\n", + "detached_tensor = v.detach()\n", + "result_detach = detached_tensor * detached_tensor\n", + "print(result_detach.requires_grad)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "efe66a5d-ac63-4623-8182-3b5aff58abbe", + "metadata": { + "tags": [] + }, + "source": [ + "## **Building a Simple Neural Network with PyTorch**" + ] + }, + { + "cell_type": "markdown", + "id": "aa4b7630-fc1e-4f7b-b86b-3c0d233cdc49", + "metadata": { + "jp-MarkdownHeadingCollapsed": true, + "tags": [] + }, + "source": [ + "Neural networks are the cornerstone of deep learning. They are organized as a series of interconnected nodes or \"neurons\" that are structured into layers: an input layer, several hidden layers, and an output layer. Data flows through this network, undergoing transformations at each node, until it emerges at the output.\n", + "\n", + "With PyTorch's `torch.nn` module, constructing these neural networks becomes straightforward. Let's dive into its main components:" + ] + }, + { + "cell_type": "markdown", + "id": "8e98f379-5580-477c-8b7b-c641f5edf710", + "metadata": { + "tags": [] + }, + "source": [ + "### **nn.Module: The Base Class for Neural Networks**" + ] + }, + { + "cell_type": "markdown", + "id": "15d72ea2-c846-44f5-85d5-bd1990c154bc", + "metadata": {}, + "source": [ + "Every neural network in PyTorch is derived from the `nn.Module` class. This class offers:\n", + "- Organization and management of the layers.\n", + "- Capabilities for GPU acceleration.\n", + "- Implementation of the forward pass.\n", + "\n", + "When we inherit from `nn.Module`, our custom neural network class benefits from these functionalities.\n", + "For more details, you can refer to the official [documentation](https://pytorch.org/docs/stable/generated/torch.nn.Module.html).\n", + "\n", + "\n", + "\n", + ">**Task:** Familiarize yourself with the structure of a simple neural network provided below. Later, you'll be enriching it." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "425abefe-54b9-4944-bc6e-cc78de892c66", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import torch.nn as nn\n", + "\n", + "class SimpleNet(nn.Module):\n", + " def __init__(self, input_size, hidden_size, output_size):\n", + " super(SimpleNet, self).__init__()\n", + " # Define layers here\n", + "\n", + " def forward(self, x):\n", + " # Call the layers in the correct order here\n", + " return x" + ] + }, + { + "cell_type": "markdown", + "id": "892e3b55-097b-436e-bbf8-a380fd7d9e35", + "metadata": { + "tags": [] + }, + "source": [ + "### **Linear Layers: Making Connections**" + ] + }, + { + "cell_type": "markdown", + "id": "564c17bb-543f-42f6-8c5d-b855ccaf71e6", + "metadata": {}, + "source": [ + "In PyTorch, a linear layer performs an affine transformation. 
It has both weights and biases which get updated during training. The transformation it performs can be described as:\n", + "\n", + "$ y = xA^T + b $\n", + "\n", + "Where:\n", + "- \\( x \\) is the input\n", + "- \\( A \\) represents the weights\n", + "- \\( b \\) is the bias\n", + "\n", + "The `nn.Linear` class in PyTorch creates such a layer.\n", + "\n", + "[Documentation Link for nn.Linear](https://pytorch.org/docs/stable/generated/torch.nn.Linear.html)\n", + "\n", + "\n", + "> **Task:** Add an input layer and an output layer to the `SimpleNet` class. \n", + ">\n", + "> - The input layer should transform from `input_size` to `hidden_size`.\n", + "> - The output layer should transform from `hidden_size` to `output_size`.\n", + "> - After defining the layers in the `__init__` method, call them in the `forward` method to perform the transformations.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "daa8829a-05e9-474e-b6e6-c7f749e22295", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Modify the below code by adding input and output linear layers in the appropriate places\n", + "\n", + "class SimpleNet(nn.Module):\n", + " def __init__(self, input_size, hidden_size, output_size):\n", + " super(SimpleNet, self).__init__()\n", + " # Define layers here\n", + "\n", + " def forward(self, x):\n", + " # Call the layers in the correct order here\n", + " return x\n" + ] + }, + { + "cell_type": "markdown", + "id": "c5038840-2713-4492-b7ab-c70469a2e96e", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "To define the input and output linear layers, use the `nn.Linear` class in the `__init__` method:\n", + "\n", + "Then, in the `forward` method, pass the input through the defined layers.\n", + "\n", + "```python\n", + "class SimpleNet(nn.Module):\n", + " def __init__(self, input_size, hidden_size, output_size):\n", + " super(SimpleNet, self).__init__()\n", + " self.input_layer = nn.Linear(input_size, hidden_size)\n", + " self.output_layer = nn.Linear(hidden_size, output_size)\n", + "\n", + " def forward(self, x):\n", + " x = self.input_layer(x)\n", + " x = self.output_layer(x)\n", + " return x\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "c2bb82c9-8949-4472-84fe-def36c514150", + "metadata": { + "tags": [] + }, + "source": [ + "### **Activation Functions: Introducing Non-Linearity**" + ] + }, + { + "cell_type": "markdown", + "id": "d989e2d8-5530-45f3-8664-e0d1b9eb627a", + "metadata": {}, + "source": [ + "Activation functions are critical components in neural networks, introducing non-linearity between layers. This non-linearity allows networks to learn from the error and make adjustments, which is essential for learning complex patterns.\n", + "\n", + "In PyTorch, many activation functions are available as part of the `torch.nn` module, such as ReLU, Sigmoid, and Tanh.\n", + "\n", + "For our `SimpleNet` model, we'll use the ReLU (Rectified Linear Unit) activation function after the input layer. The ReLU function is defined as \\(f(x) = max(0, x)\\).\n", + "\n", + "Learn more about [ReLU and other activation functions in the official documentation](https://pytorch.org/docs/stable/nn.html#non-linear-activations-weighted-sum-nonlinearity).\n", + "\n", + "> **Task**: Update your `SimpleNet` class to include the ReLU activation function after the input layer. 
For this, you'll need to both define the activation function in `__init__` and apply it in the `forward` method.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9e426301-5a55-46a2-8305-241b8f1ca4bf", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Copy the previous SimpleNet definition and modify the code to include the ReLU activation function." + ] + }, + { + "cell_type": "markdown", + "id": "212ef244-f7bf-49a2-b4c9-b1b90af315de", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "To include the ReLU activation in your neural network:\n", + "\n", + "1. Define the ReLU activation function in the `__init__` method.\n", + "2. Apply the activation function in the `forward` method after passing through the `input_layer`.\n", + "\n", + "```python\n", + "class SimpleNet(nn.Module):\n", + " def __init__(self, input_size, hidden_size, output_size):\n", + " super(SimpleNet, self).__init__()\n", + " self.input_layer = nn.Linear(input_size, hidden_size)\n", + " self.relu = nn.ReLU() # Defining the ReLU activation function\n", + " self.output_layer = nn.Linear(hidden_size, output_size)\n", + "\n", + " def forward(self, x):\n", + " x = self.input_layer(x)\n", + " x = self.relu(x) # Applying the ReLU activation function\n", + " x = self.output_layer(x)\n", + " return x\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "640ef2f4-6816-4c5e-955c-c14c33349512", + "metadata": {}, + "source": [ + "#### **Adjusting the Network: Adding Dropout**" + ] + }, + { + "cell_type": "markdown", + "id": "e5596abf-b262-461d-ad5f-6a3488a79a42", + "metadata": { + "tags": [] + }, + "source": [ + "[Dropout](https://pytorch.org/docs/stable/generated/torch.nn.Dropout.html) is a regularization technique that can improve generalization in neural networks. It works by randomly setting a fraction of input units to 0 at each update during training time. \n", + "\n", + "> **Task**: Modify the `SimpleNet` class to include a dropout layer with a dropout probability of 0.5 between the input layer and the output layer. Don't forget to call this layer in the forward method. \n", + ">\n", + "> Remember, after modifying the class structure, you'll need to re-instantiate your model object." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1c68ffd4-1de6-4d77-a15f-705b24c924af", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Add a dropout layer to your previous code" + ] + }, + { + "cell_type": "markdown", + "id": "d78c2dab-95c1-441c-b661-80bfba9a2dfd", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Here's how you can modify the SimpleNet class to include dropout:\n", + "\n", + "```python\n", + "\n", + "class SimpleNet(nn.Module):\n", + " def __init__(self, input_size, hidden_size, output_size):\n", + " super(SimpleNet, self).__init__()\n", + " self.input_layer = nn.Linear(input_size, hidden_size)\n", + " self.dropout = nn.Dropout(0.5)\n", + " self.output_layer = nn.Linear(hidden_size, output_size)\n", + "\n", + " def forward(self, x):\n", + " x = self.input_layer(x)\n", + " x = self.dropout(x)\n", + " return self.output_layer(x)\n", + " \n", + "model = SimpleNet(input_size, hidden_size, output_size).to(device) \n", + "```\n", + "Don't forget to create a new instance of your model: model = SimpleNet(input_size, hidden_size, output_size).to(device)\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "ce1cb22c-8288-4c69-9dcb-56896de49794", + "metadata": { + "tags": [] + }, + "source": [ + "### **Utilizing the Neural Network**" + ] + }, + { + "cell_type": "markdown", + "id": "255c3bf2-419d-4d14-82d6-7959e9280670", + "metadata": { + "tags": [] + }, + "source": [ + "Once our neural network is defined, it's time to put it to use. This section will cover:\n", + "\n", + "1. Instantiating the network\n", + "2. Transferring the network to GPU (if available)\n", + "3. Making predictions using the network (forward pass)\n", + "4. Understanding training and evaluation modes\n", + "5. Performing a backward pass to compute gradients" + ] + }, + { + "cell_type": "markdown", + "id": "9f28cee5-c7a0-48c5-8341-6da6fae516c5", + "metadata": { + "tags": [] + }, + "source": [ + "#### **1. Instantiating the Network**" + ] + }, + { + "cell_type": "markdown", + "id": "0760bef6-d77a-4b7b-b5c7-18b208d93b98", + "metadata": {}, + "source": [ + "\n", + "To use our `SimpleNet`, we first need to create an instance of it. While creating an instance, the network's weights are also initialized.\n", + "\n", + "> **Task**: Instantiate the `SimpleNet` class. Use `input_size=5`, `hidden_size=3`, and `output_size=1` as parameters." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ae9bfc87-5b09-476c-b32b-92c09f992fe3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code here: Instantiate the model" + ] + }, + { + "cell_type": "markdown", + "id": "f951e5d2-e0b4-451d-9a9b-44256f8a224c", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To instantiate the SimpleNet class:\n", + "\n", + "```python\n", + "\n", + "model = SimpleNet(input_size=5, hidden_size=3, output_size=1)\n", + "print(model)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "35567e41-6de6-429b-be4b-a14598313aca", + "metadata": { + "tags": [] + }, + "source": [ + "#### **2. Transferring the Network to GPU**\n" + ] + }, + { + "cell_type": "markdown", + "id": "b3f3b3c3-4d7a-46db-9634-1e14b277c808", + "metadata": { + "tags": [] + }, + "source": [ + "\n", + "PyTorch makes it very straightforward to transfer our model to a GPU if one is available. 
This is done using the .to() method.\n", + "\n", + "> **Task**: Check if GPU (CUDA) is available. If it is, transfer the model to the GPU." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91cb61a0-d890-4697-88d9-7749ea2bf144", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Check for GPU availability and transfer the model to GPU if available." + ] + }, + { + "cell_type": "markdown", + "id": "8a405f2d-3d8d-4e4c-90d1-54a05ff08b90", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To transfer the model to the GPU if it's available:\n", + "\n", + "```python\n", + "\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "model = model.to(device)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "175ab7cc-cddf-4460-ab01-f0193c2908d7", + "metadata": { + "tags": [] + }, + "source": [ + "#### **3. Making Predictions using the Network (Forward Pass)**" + ] + }, + { + "cell_type": "markdown", + "id": "e3724444-e0a6-48b0-8872-0b53b000a3bd", + "metadata": {}, + "source": [ + "With our model instantiated and potentially on a GPU, we can use it to make predictions. This involves passing some input data through the model, which is commonly referred to as a forward pass.\n", + "\n", + "> **Task**: Create a tensor of size [1, 5] (representing one sample with five features) with random values. Transfer this tensor to the same device as your model (GPU or CPU). Then, pass this tensor through your model to get the prediction." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "00e818ee-72e0-4960-a87e-a27b771d58eb", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Create a tensor, transfer it to the right device, and perform a forward pass.\n" + ] + }, + { + "cell_type": "markdown", + "id": "8bc38fde-0c14-45a6-b237-76ec7beab7f0", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To make predictions using your model:\n", + "\n", + "```python\n", + "\n", + "# Create a tensor with random values\n", + "input_tensor = torch.randn(1, 5).to(device)\n", + "\n", + "# Pass the tensor through the model\n", + "output = model(input_tensor)\n", + "print(output)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "fad9f46f-b591-4a2f-b2bf-3b4cf54cf961", + "metadata": { + "tags": [] + }, + "source": [ + "#### **4. Understanding Training and Evaluation Modes**" + ] + }, + { + "cell_type": "markdown", + "id": "2f197278-8d74-4a69-8da9-caf3f952e7bc", + "metadata": {}, + "source": [ + "Every PyTorch model has two modes:\n", + "- `train` mode: In this mode, certain layers like dropout or batch normalization behave differently than during evaluation. For instance, dropout will randomly set a fraction of input units to 0 at each update during training.\n", + "- `eval` mode: Here, the model behaves in a deterministic manner. Dropout layers don't drop activations, and batch normalization uses the entire dataset's statistics instead of the current mini-batch's statistics.\n", + "\n", + "Setting the model to the correct mode is crucial. Let's demonstrate this.\n", + "\n", + "> **Task**: Set your model to `train` mode, then perform a forward pass using the same input tensor multiple times and observe the outputs. Then, set your model to `eval` mode and repeat. Notice any differences?" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4c2d921d-d409-4ae6-8ee4-8376fc9a209d", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Perform the forward passes multiple times with the same input in both modes and observe the outputs." + ] + }, + { + "cell_type": "markdown", + "id": "0dbd65fa-b86b-4516-9fb1-aceae0c9d8a3", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Here's how you can demonstrate the difference:\n", + "\n", + "```python\n", + "# Set to train mode\n", + "model.train()\n", + "\n", + "# Forward pass multiple times\n", + "print(\"Train mode:\")\n", + "for i in range(5):\n", + " print(model(input_tensor))\n", + "\n", + "# Set to eval mode\n", + "model.eval()\n", + "print(\"Eval mode:\")\n", + "# Forward pass multiple times\n", + "for i in range(5):\n", + " print(model(input_tensor))\n", + "```\n", + " \n", + "If there were layers like dropout in your model, you'd notice that the outputs in training mode might differ on each pass, while in evaluation mode, they remain consistent.\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "e8c55be3-71f7-45e7-91d1-c556e8108fef", + "metadata": { + "tags": [] + }, + "source": [ + "## **The Training Procedure in PyTorch**" + ] + }, + { + "cell_type": "markdown", + "id": "eac54af7-c8db-4a19-861b-2eecf68fb44e", + "metadata": { + "tags": [] + }, + "source": [ + "Training a neural network involves several key components: defining a loss function to measure errors, selecting an optimization method to adjust the model's weights, and iterating over the dataset multiple times. In this section, we will break down these components step by step, starting with the basics and moving towards more complex tasks." + ] + }, + { + "cell_type": "markdown", + "id": "3e9231a9-105c-4aed-bfa5-846ddc07245f", + "metadata": { + "tags": [] + }, + "source": [ + "### **Datasets and DataLoaders: Handling and Batching Data**" + ] + }, + { + "cell_type": "markdown", + "id": "8dbc3fcf-5a29-4fd8-9e82-3eaae4c8dc90", + "metadata": {}, + "source": [ + "In PyTorch, the torch.utils.data.Dataset class is used to represent a dataset. This abstract class requires the implementation of two primary methods: __len__ (to return the number of items) and __getitem__ (to return the item at a given index). However, PyTorch provides a utility class, TensorDataset, that wraps tensors in the dataset format, making it easier to use with the DataLoader.\n", + "\n", + "The torch.utils.data.DataLoader class is a more powerful tool, responsible for:\n", + "\n", + "- Batching the data\n", + "- Shuffling the data\n", + "- Loading the data in parallel using multiprocessing workers\n", + "\n", + "Let's wrap some data in a Dataset and use a DataLoader to handle batching and shuffling.\n", + "\n", + "> **Task**: Convert the input and target tensors into a dataset and dataloader. For this exercise, set the batch size to 32.\n", + "\n", + "Below we define synthetic data that is learnable.\n", + "This way, we're essentially modeling the relationship $y=mx+c+noise$ where:\n", + "- $y$ is the target or output.\n", + "- $m$ is the slope of the line.\n", + "- $c$ is the y-intercept.\n", + "- $x$ is the input.\n", + "- $noise$ is a small random value added to each point to make the data more realistic." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f8335e62-e0c0-4381-9c20-1ca8ed78516c", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "num_samples = 1000\n", + "\n", + "# Define the relationship\n", + "m = 2.0\n", + "c = 1.0\n", + "noise_factor = 0.05\n", + "\n", + "\n", + "\n", + "# Generate input tensor\n", + "input_tensor = torch.linspace(-10, 10, num_samples).view(-1, 1)\n", + "\n", + "# Generate target tensor based on the relationship\n", + "target_tensor = m * input_tensor + c + noise_factor * torch.randn(num_samples, 1)\n", + "import matplotlib.pyplot as plt\n", + "plt.figure(figsize=(10,6))\n", + "plt.scatter(input_tensor.numpy(), target_tensor.numpy(), color='blue', marker='o')\n", + "plt.title(\"Synthetic Data Visualization\")\n", + "plt.xlabel(\"Input\")\n", + "plt.ylabel(\"Target\")\n", + "plt.grid(True)\n", + "plt.show()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9535ad7e-6534-491b-b38d-b61cdd60b39d", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Convert our data into a dataset\n", + "# ...\n", + "\n", + "# Create a data loader for mini-batch training\n", + "# ..." + ] + }, + { + "cell_type": "markdown", + "id": "da99866e-ebd0-403d-8159-8a36d601bf09", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Use the TensorDataset class from torch.utils.data to wrap your tensors in a dataset format. After defining your dataset, you can use the DataLoader class to create an iterator that will return batches of data.\n", + " \n", + "```python\n", + "from torch.utils.data import DataLoader, TensorDataset\n", + "\n", + "# Convert our data into a dataset\n", + "dataset = TensorDataset(input_tensor, target_tensor)\n", + "\n", + "# Create a data loader for mini-batch training\n", + "batch_size = 32\n", + "data_loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "ea5aee0c-6c8a-485f-b099-9844a28bafa3", + "metadata": { + "tags": [] + }, + "source": [ + "> **Task**: Explore the `dataset` and `data_loader`:\n", + "> 1. Print the total number of samples in the dataset and DataLoader.\n", + "> 2. Iterate one time over both and print the shape of items you retrieve." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "244a8198-60c5-4154-93ab-3d96fbf3488a", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Total number of samples\n", + "# ...\n", + "\n", + "# Dataset elements\n", + "# ...\n", + "\n", + "# DataLoader elements\n", + "# ..." + ] + }, + { + "cell_type": "markdown", + "id": "882438f7-3cc7-4a20-a223-41ede7856ef4", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "When you iterate over the dataset, each item you get from the iteration should be a tuple of (input, target), so you should retrieve two elements each of len 1.\n", + "\n", + "On the other hand, when you iterate over the data_loader, each item you get from the iteration is a mini-batch of data. 
Thus, the length you get from each iteration should correspond to the batch size you've set (i.e., 32 in our case), except possibly the last batch if the dataset size isn't a perfect multiple of the batch size.\n", + "\n", + "```python\n", + "# Total number of samples\n", + "print(f\"Total samples in dataset: {len(dataset)}\")\n", + "print(f\"Total batches in DataLoader: {len(data_loader)}\")\n", + "\n", + "# Dataset elements\n", + "(index, (data, target)) = next(enumerate(dataset))\n", + "print(f\"Sample {index}: Data shape {data.shape}, Target shape {target.shape}\")\n", + "\n", + "# DataLoader elements\n", + "(index, (batch_data, batch_target)) = next(enumerate(data_loader))\n", + "print(f\"Batch {index}: Data shape {batch_data.shape}, Target shape {batch_target.shape}\")\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "8dc08bb3-e5b2-4a7d-be10-6adc496a812d", + "metadata": { + "tags": [] + }, + "source": [ + "### **Splitting the Dataset: Training, Validation, and Testing Sets**\n" + ] + }, + { + "cell_type": "markdown", + "id": "659a4899-cb14-4a47-b990-ea1a77592102", + "metadata": {}, + "source": [ + "When training neural networks, it's common to split the dataset into at least two sets:\n", + "\n", + "1. **Training Set**: This set is used to train the model, i.e., adjust the weights using gradient descent.\n", + "2. **Validation Set** (optional, but often used): This set is used to evaluate the model during training, allowing for hyperparameter tuning without overfitting.\n", + "3. **Test Set**: This set is used to evaluate the model's performance after training, providing an unbiased assessment of its performance on new, unseen data.\n", + "\n", + "In PyTorch, we can use the `random_split` function from `torch.utils.data` to easily split datasets.\n", + "\n", + "First, let's define the lengths for each split:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32202871-2911-44e6-8ad6-6d848cb3ede0", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "total_samples = len(dataset)\n", + "train_size = int(0.8 * total_samples)\n", + "val_size = total_samples - train_size" + ] + }, + { + "cell_type": "markdown", + "id": "a1f7a839-8ee0-460f-bef0-87ca30f7409e", + "metadata": {}, + "source": [ + "> **Task**: Using the random_split function, split the dataset into a training set and a validation set using the sizes provided above.\n", + "[Here's the documentation for random_split](https://pytorch.org/docs/stable/data.html#torch.utils.data.random_split).\n", + "> **Task**: Create the train_loader and val_loader" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "50a80fc9-ef6e-4118-ad6a-3dea9d16e94f", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Splitting the dataset\n" + ] + }, + { + "cell_type": "markdown", + "id": "b01bb0d7-17c0-4edd-a2b6-17e4ca74b2aa", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "\n", + "# Splitting the dataset\n", + "from torch.utils.data import random_split\n", + "train_dataset, val_dataset = random_split(dataset, [train_size, val_size])\n", + "train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)\n", + "val_loader = DataLoader(val_dataset, batch_size=batch_size, shuffle=False)\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "e2729431-701c-4451-931c-2ae0ed58dbb5", + "metadata": { + "tags": [] + }, + "source": [ + "> **Task**: Now, 
using the provided training and validation datasets, print out the number of samples in each set. Also, fetch one sample from each set and print its shape.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "770c42f6-7a52-4856-a4fe-23a60666389a", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code here" + ] + }, + { + "cell_type": "markdown", + "id": "583948e8-898a-4336-92c6-aaddef6adbcf", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "\n", + "# Print number of samples in each set\n", + "print(f\"Number of training samples: {len(train_dataset)}\")\n", + "print(f\"Number of validation samples: {len(val_dataset)}\")\n", + "\n", + "# Fetching one sample from each set and printing its shape\n", + "train_sample, train_target = train_dataset[0]\n", + "print(f\"Training sample shape: {train_sample.shape}, Target shape: {train_target.shape}\")\n", + "\n", + "val_sample, val_target = val_dataset[0]\n", + "print(f\"Validation sample shape: {val_sample.shape}, Target shape: {val_target.shape}\")\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "0fdec6d6-9b32-457d-b8e6-d94d8e020e4f", + "metadata": { + "tags": [] + }, + "source": [ + "### **Loss Functions: Measuring Model Errors**" + ] + }, + { + "cell_type": "markdown", + "id": "899ce66c-e878-4f6a-b37c-34cdeae438a1", + "metadata": {}, + "source": [ + "Every training process needs a metric to determine how well the model's predictions align with the actual data. This metric is called the loss function or cost function. PyTorch provides many [loss functions](https://pytorch.org/docs/stable/nn.html#loss-functions) suitable for different types of tasks.\n", + "\n", + "Different problems might require different loss functions. PyTorch provides a variety of [loss functions](https://pytorch.org/docs/stable/nn.html#loss-functions) suited for different tasks. For instance:\n", + "- **Mean Squared Error (MSE)**: Commonly used for regression tasks.\n", + "- **Cross-Entropy Loss**: Suited for classification tasks.\n", + "\n", + "\n", + "For a simple regression task, a common choice is the Mean Squared Error (MSE) loss. \n", + "\n", + "> **Task**: Familiarize yourself with the [MSE loss documentation](https://pytorch.org/docs/stable/generated/torch.nn.MSELoss.html). You will soon use it in the training loop.\n", + "\n", + "> **Task**: Instantiate the Mean Squared Error (MSE) loss provided by PyTorch for our current neural network." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "692e83d7-7382-4ab2-9caf-daa3a77bfd4d", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Define the loss function.\n" + ] + }, + { + "cell_type": "markdown", + "id": "7fe8dcb5-8a43-4561-88a0-a4a2a2d1bf53", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To define the MSE loss in PyTorch, you can use:\n", + "\n", + "```python\n", + "\n", + "criterion = nn.MSELoss()\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "e957d999-0a56-4320-808a-05d1af6b81c7", + "metadata": { + "tags": [] + }, + "source": [ + "### **Optimizers: Adjusting Weights**" + ] + }, + { + "cell_type": "markdown", + "id": "d3d4a09d-8838-4fd3-9e16-bfdc5018abde", + "metadata": {}, + "source": [ + "Optimizers adjust the weights of the network based on the gradients computed during backpropagation. 
Different optimizers might update weights in varying ways. For example, the popular **Stochastic Gradient Descent (SGD)** optimizer simply updates weights in the direction of negative gradients, while **Adam** and **RMSprop** are more advanced optimizers that consider aspects like momentum and weight decay.\n", + "\n", + "PyTorch offers a wide range of [optimizers](https://pytorch.org/docs/stable/optim.html). \n", + "\n", + "\n", + "> **Task**: Review the [SGD optimizer documentation](https://pytorch.org/docs/stable/optim.html#torch.optim.SGD). It will be pivotal in the training loop you'll construct.\n", + "\n", + "> **Task**: For this exercise, let's use the SGD optimizer. Instantiate it, setting our neural network parameters as the ones to be optimized and choosing a learning rate of 0.01.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39c8dfa8-7ea0-44e4-9429-118a6333bfe1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Define the optimizer.\n" + ] + }, + { + "cell_type": "markdown", + "id": "05e37f67-519a-4c49-97b3-2fafb7176de1", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To define the SGD optimizer in PyTorch, you can use:\n", + "\n", + "```python\n", + "optimizer = torch.optim.SGD(model.parameters(), lr=0.0001)\n", + "```\n", + "Because of how simple the task is, you will probably need a really small learning rate to reach good results.\n", + "</details>\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "13b2fb3e-5391-4e66-ba83-55e66935d2aa", + "metadata": { + "tags": [] + }, + "source": [ + "### **Setting Up the Basic Training Loop Function**" + ] + }, + { + "cell_type": "markdown", + "id": "7a364925-b4d9-4ffd-b3f8-be30a5bb1613", + "metadata": { + "jp-MarkdownHeadingCollapsed": true, + "tags": [] + }, + "source": [ + "Having a training loop within a function allows us to reuse the same code structure for different models, datasets, or other training parameters without redundancy. This modular approach also promotes code clarity and maintainability.\n", + "\n", + "Let's define the training loop function which takes the model, data (inputs and targets), loss function, optimizer, and the number of epochs as parameters. The function should return the history of the loss after each epoch.\n", + "\n", + "A typical training loop consists of:\n", + "1. Sending the input through the model (forward pass).\n", + "2. Calculating the loss.\n", + "3. Propagating the loss backward through the model to compute gradients (backward pass).\n", + "4. Updating the weights using the optimizer.\n", + "5. Repeating the steps for several epochs.\n", + "\n", + "\n", + "Training with the entire dataset as one batch can be memory-intensive and sometimes not as effective. Hence, in practice, we usually divide our dataset into smaller chunks or mini-batches and update our weights after each mini-batch.\n", + "\n", + "> **Task**: Create a function named `train_model` that encapsulates the training loop for the `SimpleNet` model. 
The function should follow the signature in the next code cell:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "734864fe-46b6-4435-b58d-19b085ebd3f9", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "def train_model(model, dataloader, loss_function, optimizer, epochs):\n", + " # Your code here\n", + " pass" + ] + }, + { + "cell_type": "markdown", + "id": "a6fee8dc-59da-4d48-918e-d6e093e997e5", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "Here's how the train_model function might look:\n", + "```python\n", + "\n", + "def train_model(model, dataloader, loss_function, optimizer, epochs):\n", + " # Store the loss values at each epoch\n", + " loss_history = []\n", + " \n", + " for epoch in range(epochs):\n", + " for inputs, targets in dataloader:\n", + " # Ensure that data is on the right device\n", + " inputs, targets = inputs.to(device), targets.to(device)\n", + " \n", + " # Reset the gradients to zero\n", + " optimizer.zero_grad()\n", + " \n", + " # Execute a forward pass\n", + " outputs = model(inputs)\n", + " \n", + " # Calculate the loss\n", + " loss = loss_function(outputs, targets)\n", + " \n", + " # Conduct a backward pass\n", + " loss.backward()\n", + " \n", + " # Update the weights\n", + " optimizer.step()\n", + " \n", + " # Append the loss to the history\n", + " loss_history.append(loss.item())\n", + " \n", + " print(f\"Epoch [{epoch+1}/{epochs}], Loss: {loss_history[-1]:.4f}\")\n", + " \n", + " return loss_history\n", + "```\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "c4e4b485-ffa6-487d-8dbc-b0b0590a796a", + "metadata": { + "tags": [] + }, + "source": [ + "### **Training the Neural Network**" + ] + }, + { + "cell_type": "markdown", + "id": "15ba6b07-728f-4444-a3a9-af8cfeb884e1", + "metadata": {}, + "source": [ + "With all the components defined in the previous sections, it's now time to integrate everything and set the training process in motion.\n", + "\n", + "> **Task**: Combine all the previously defined elements to initiate the training procedure for your neural network model.\n", + "> 1. Don't forget to move your model to the same device as your data (GPU or CPU).\n", + "> 2. 
Train the model using the `train_loader` and `val_loader`.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "90d043f7-213d-42a7-a14b-e6b716003b70", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code here to initiate the training process\n" + ] + }, + { + "cell_type": "markdown", + "id": "398aaeec-5d6d-4ef6-bd24-27d51b32c148", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "To train the model, you need to integrate all the previously defined components:\n", + "\n", + "```python\n", + "# Moving the model to the device\n", + "model = SimpleNet(input_size=1, hidden_size=10, output_size=1).to(device)\n", + "\n", + "# Training the model using the train_loader\n", + "loss_history = train_model(model, train_loader, criterion, optimizer, epochs=50)\n", + "```\n", + "Make sure you have defined the loss_function, optimizer, and epochs in the previous sections.\n", + "</details>" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7cf3df1-9fe2-4eee-a5bf-386f77b257f1", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "# Plotting the loss curve\n", + "plt.figure(figsize=(10,6))\n", + "plt.plot(loss_history, label='Training Loss')\n", + "plt.title(\"Loss Curve\")\n", + "plt.xlabel(\"Epochs\")\n", + "plt.ylabel(\"Loss\")\n", + "plt.legend()\n", + "plt.grid(True)\n", + "plt.show()\n" + ] + }, + { + "cell_type": "markdown", + "id": "2b7f9d87-c172-427c-a2f4-1090b1120148", + "metadata": { + "tags": [] + }, + "source": [ + "## **Conclusion: Moving Beyond the Basics**" + ] + }, + { + "cell_type": "markdown", + "id": "6074877c-c149-4af9-8503-153455edd42a", + "metadata": {}, + "source": [ + "\n", + "You've now built and trained a simple neural network using PyTorch, and you might be wondering: why aren't my results as good as I expected?\n", + "\n", + "While you've certainly made strides, the journey of mastering deep learning and neural networks is filled with nuance, challenges, and constant learning. Here are some reasons why your results might not be optimal and what you'll discover in your next steps:\n", + "\n", + "1. **Hyperparameters Tuning**: So far, we've set values like learning rate and batch size somewhat arbitrarily. These values are critical and often require careful tuning specific to each problem. \n", + "\n", + "2. **Learning Rate Scheduling**: A fixed learning rate might not always be the best strategy. Reducing the learning rate during training, known as learning rate annealing or scheduling, often leads to better convergence.\n", + "\n", + "3. **Model Architecture**: The neural network we built is basic. There's an entire world of architectures out there, designed for specific types of data and tasks. The right architecture can make a significant difference.\n", + "\n", + "4. **Regularization**: To prevent overfitting, techniques like dropout, weight decay, and early stopping can be applied. We haven't touched upon these, but they're crucial for ensuring your model generalizes well to unseen data.\n", + "\n", + "5. **Data Quality and Quantity**: While we used synthetic data for simplicity, real-world data is messy. Cleaning and preprocessing data, augmenting it, and ensuring it's representative can have a significant impact on performance.\n", + "\n", + "6. 
**Optimization Techniques**: There are advanced optimization algorithms and techniques that can speed up training and lead to better convergence. Techniques like momentum and adaptive learning rates (e.g., Adam, RMSprop) can play a crucial role.\n", + "\n", + "7. **Evaluation Metrics**: We've looked at loss values, but in real-world scenarios, understanding and selecting the right evaluation metrics for the task (accuracy, F1-score, AUC-ROC, etc.) is vital. \n", + "\n", + "8. **Training Dynamics**: Understanding how models train, visualizing the activations, weights, and gradients, and knowing when and why a model is struggling can offer insights into how to improve performance.\n", + "\n", + "Remember, while the mechanics of building and training a neural network are essential, the art of deep learning lies in understanding the nuances and iterating based on insights and knowledge. The next steps in your learning, focusing on methodology, will provide the tools and knowledge to navigate these complexities and achieve better results.\n", + "\n", + "Keep learning, experimenting, and iterating! The world of deep learning is vast, and there's always something new to discover." + ] + }, + { + "cell_type": "markdown", + "id": "ca6048e4-f3cf-40eb-bd50-c95f281f0554", + "metadata": { + "tags": [] + }, + "source": [ + "## **Extra for the Fast Movers: Diving Deeper**" + ] + }, + { + "cell_type": "markdown", + "id": "46a25dfd-1cc9-444d-98d6-966e7cc9da07", + "metadata": {}, + "source": [ + "To further enhance your understanding and capability with PyTorch, this section introduces additional topics that cater to more advanced use cases. These tools and techniques can be essential when dealing with larger and more complex projects, providing valuable insights into optimization and performance." + ] + }, + { + "cell_type": "markdown", + "id": "30edeed8-321b-4b1f-ace6-0decd8a167e5", + "metadata": { + "tags": [] + }, + "source": [ + "### **Profiling with PyTorch Profiler in TensorBoard**" + ] + }, + { + "cell_type": "markdown", + "id": "256bd4a2-aa6f-4a50-9c5d-854ca25293de", + "metadata": {}, + "source": [ + "PyTorch, starting from version 1.9.0, incorporates the PyTorch Profiler as a TensorBoard plugin. This integration allows users to profile their PyTorch code and visualize the results directly within TensorBoard.\n", + "Below, we will instrument the PyTorch code for TensorBoard profiling.\n", + "\n", + "Use this [documentation](http://www.idris.fr/jean-zay/pre-post/profiler_pt.html) to complete the following tasks.\n", + "\n", + "> **Task:** Before instrumenting your PyTorch code, you'll need to import the necessary modules for profiling.\n", + "\n", + "> **Task:** Modify the training loop to invoke the profiler. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86b471a6-7de6-40f0-af58-c41e8e8acbae", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your imports here\n", + "\n", + "# Your code here\n", + "def train_model_with_profiling(model, train_loader, criterion, optimizer, epochs, profiler_dir='./profiler'):\n", + " # Your code here\n", + " pass" + ] + }, + { + "cell_type": "markdown", + "id": "f389816a-fa2a-4668-9f0b-07d2a5abf5e1", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "```python\n", + "from torch.profiler import profile, tensorboard_trace_handler, ProfilerActivity, schedule\n", + "\n", + "def train_model_with_profiling(model, dataloader, loss_function, optimizer, epochs, profiler_dir='./profiler'):\n", + " # Store the loss values at each epoch\n", + " loss_history = []\n", + " \n", + " with profile(activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],\n", + " schedule=schedule(wait=1, warmup=1, active=12, repeat=1),\n", + " on_trace_ready=tensorboard_trace_handler(profiler_dir)) as prof:\n", + " for epoch in range(epochs):\n", + " for inputs, targets in dataloader:\n", + " # Ensure that data is on the right device\n", + " inputs, targets = inputs.to(device), targets.to(device)\n", + " \n", + " # Reset the gradients to zero\n", + " optimizer.zero_grad()\n", + " \n", + " # Execute a forward pass\n", + " outputs = model(inputs)\n", + " \n", + " # Calculate the loss\n", + " loss = loss_function(outputs, targets)\n", + " \n", + " # Conduct a backward pass\n", + " loss.backward()\n", + " \n", + " # Update the weights\n", + " optimizer.step()\n", + " \n", + " # Append the loss to the history\n", + " loss_history.append(loss.item())\n", + " \n", + " # Notify profiler of step boundary\n", + " prof.step()\n", + " \n", + " print(f\"Epoch [{epoch+1}/{epochs}], Loss: {loss_history[-1]:.4f}\")\n", + " \n", + " return loss_history\n", + "```\n", + "Make sure you have defined the loss_function, optimizer, and epochs in the previous sections.\n", + "</details>" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cb82f0a9-522f-4746-87f9-ba7b7952d863", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Training the model using the train_loader\n", + "loss_history = train_model_with_profiling(model, train_loader, criterion, optimizer, 10, profiler_dir='./profiler')" + ] + }, + { + "cell_type": "markdown", + "id": "313e4f40-521a-4beb-a278-c1ca9502b499", + "metadata": {}, + "source": [ + "> **Task:** Visualize the profiling, you will need to open a Tensorboard interface using the Blue button on the top left corner.\n", + ">\n", + "> **Make sur to specify the logdir with \"--logid=/path/to/profiler_folder\".**" + ] + }, + { + "cell_type": "markdown", + "id": "06f86768-3b78-4874-b083-64bc365080fb", + "metadata": { + "tags": [] + }, + "source": [ + "### **Learning Rate Scheduling**" + ] + }, + { + "cell_type": "markdown", + "id": "44721444-ba4a-44d0-9b65-16890dd4f097", + "metadata": {}, + "source": [ + "One of the key hyperparameters to tune during neural network training is the learning rate. While it's possible to set a static learning rate for the entire training process, in practice, dynamically adjusting the learning rate often leads to better convergence and overall performance. 
This dynamic adjustment is often referred to as learning rate scheduling or annealing.\n", + "\n", + "**Concept of Learning Rate Scheduling**\n", + "\n", + "The learning rate determines the step size at each iteration while moving towards a minimum of the loss function. If it's too large, the optimization might overshoot the minimum. Conversely, if it's too small, the training might get stuck, or convergence could be very slow.\n", + "\n", + "A learning rate scheduler changes the learning rate during training based on the provided scheduling policy. By adjusting the learning rate during training, you can achieve faster convergence and better final results.\n", + "\n", + "**Using Learning Rate Schedulers in PyTorch**\n", + "\n", + "PyTorch provides a variety of learning rate schedulers through the torch.optim.lr_scheduler module. Some of the popular ones are:\n", + "- StepLR: Decays the learning rate of each parameter group by gamma every step_size epochs.\n", + "- ExponentialLR: Decays the learning rate of each parameter group by gamma every epoch.\n", + "- ReduceLROnPlateau: Reduces the learning rate when a metric has stopped improving.\n", + "\n", + "> **Task:** Take a look at the [documentation](https://pytorch.org/docs/stable/optim.html) or click on the hint in the following cell, then integrate an LR scheduler into the training code that you wrote before." + ] + }, + { + "cell_type": "markdown", + "id": "0c79a170-35d0-438f-b01b-a3f236f8b724", + "metadata": { + "tags": [] + }, + "source": [ + "\n", + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "Below, you have a typical training loop with a learning rate scheduler.\n", + " \n", + "```python\n", + "from torch.optim.lr_scheduler import StepLR\n", + "optimizer = torch.optim.SGD(model.parameters(), lr=0.1)\n", + "scheduler = StepLR(optimizer, step_size=10, gamma=0.1)\n", + "for epoch in range(epochs):\n", + " for input, target in data:\n", + " optimizer.zero_grad()\n", + " output = model(input)\n", + " loss = loss_fn(output, target)\n", + " loss.backward()\n", + " optimizer.step()\n", + " \n", + " # Step the learning rate scheduler\n", + " scheduler.step()\n", + "```\n", + "</details>\n" + ] + }, + { + "cell_type": "markdown", + "id": "33f99f6e-3120-495a-a25b-8b9f3d14deb2", + "metadata": { + "tags": [] + }, + "source": [ + "### **Automatic Mixed Precision**" + ] + }, + { + "cell_type": "markdown", + "id": "217a7249-6655-4587-92b8-72dea7de8c9d", + "metadata": {}, + "source": [ + "Training deep neural networks can be both time-consuming and resource-intensive. One way to address this problem is by leveraging mixed precision training. In essence, mixed precision training uses both 16-bit and 32-bit floating-point types to represent numbers in the model, which can speed up training without sacrificing the accuracy of the final model.\n", + "\n", + "**Overview of AMP (Automatic Mixed Precision)**\n", + "\n", + "AMP (Automatic Mixed Precision) is a set of utilities provided by PyTorch to enable mixed precision training more effortlessly. The main advantages of AMP are:\n", + "- Faster Training: By using reduced precision, the model requires less memory bandwidth, resulting in faster data transfers and faster matrix multiplication.\n", + "- Reduced GPU Memory Usage: This enables training of larger models or utilization of larger batch sizes.\n", + "\n", + "PyTorch has integrated the AMP utilities starting from version 1.6.\n", + "\n", + "> **Task**: Set up AMP in the training function by checking the [documentation](http://www.idris.fr/eng/ia/mixed-precision-eng.html). 
You will need to do the necessary imports, initialize the GradScaler, and modify the training loop by including \"with autocast():\" around the forward pass and loss computation." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad131b4b-02ba-472d-af78-a048868e3efc", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Your code here" + ] + }, + { + "cell_type": "markdown", + "id": "de38cb30-7b24-48cb-b804-ed296e38e3fb", + "metadata": { + "tags": [] + }, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "Below, you have a typical training loop with autocast.\n", + " \n", + "```python\n", + "from torch.cuda.amp import autocast, GradScaler\n", + "scaler = GradScaler()\n", + "for epoch in range(epochs):\n", + " for input, target in data:\n", + " optimizer.zero_grad()\n", + " \n", + " with autocast():\n", + " output = model(input)\n", + " loss = loss_fn(output, target)\n", + " \n", + " scaler.scale(loss).backward()\n", + " scaler.step(optimizer)\n", + " scaler.update()\n", + "```\n", + "</details>\n" + ] + }, + { + "cell_type": "markdown", + "id": "a3f7818a-fea1-4a12-b52a-cd83e0ae2ffe", + "metadata": {}, + "source": [ + "### **PyTorch Compiler**" + ] + }, + { + "cell_type": "markdown", + "id": "dbb5f69b-009e-40b3-94f0-5a420afbd003", + "metadata": {}, + "source": [ + "**For this section, you will need to use PyTorch version 2.0 or higher.**\n", + "\n", + "PyTorch, a widely adopted deep learning framework, has consistently evolved to offer users better performance and ease of use. One such advancement is the introduction of the PyTorch Compiler. This cutting-edge feature accelerates PyTorch code execution by JIT-compiling it into optimized kernels. What's even more impressive is its ability to enhance performance with minimal modifications to the original codebase.\n", + "\n", + "Historically, PyTorch has introduced compiler solutions like TorchScript and FX Tracing. However, the introduction of torch.compile with PyTorch 2.0 has taken performance optimization to a new level. It provides a seamless experience, enabling you to transform typical PyTorch functions and even torch.nn.Module instances into their faster, compiled counterparts.\n", + "\n", + "For those eager to dive deep into its workings and benefits, detailed documentation and tutorials have been made available:\n", + "- [torch.compile Tutorial](https://pytorch.org/tutorials/intermediate/torch_compile_tutorial.html)\n", + "- [PyTorch 2.0 Release Notes](https://pytorch.org/get-started/pytorch-2.0/)\n", + "\n", + "> **Task:** Make your existing PyTorch model take advantage of the performance benefits offered by torch.compile. This will not only make your model run faster but also give you hands-on experience with one of the latest features in PyTorch." + ] + }, + { + "cell_type": "markdown", + "id": "8d5236bc-08e4-4142-8c9c-fd7007474ff2", + "metadata": {}, + "source": [ + "<details>\n", + "<summary>Hint (click to reveal)</summary>\n", + "\n", + "1. **Ensure Dependencies**:\n", + " - Ensure that you have the required dependencies, especially PyTorch version 2.0 or higher.\n", + "\n", + "2. **Check for GPU Compatibility**:\n", + " - For optimal performance, it's recommended to use a modern NVIDIA GPU (H100, A100, or V100).\n", + "\n", + "3. **Compile Functions**:\n", + " - You can optimize arbitrary Python functions as shown in the example:\n", + " ```python\n", + " def your_function(x, y):\n", + " # ... 
Your PyTorch code here ...\n", + " opt_function = torch.compile(your_function)\n", + " ```\n", + "\n", + " - Alternatively, use the decorator approach:\n", + " ```python\n", + " @torch.compile\n", + " def opt_function(x, y):\n", + " # ... Your PyTorch code here ...\n", + " ```\n", + "\n", + "4. **Compile Modules**:\n", + " - If you have a PyTorch module (a class derived from `torch.nn.Module`), you can compile it similarly:\n", + " ```python\n", + " class YourModule(torch.nn.Module):\n", + " # ... Your module definition here ...\n", + "\n", + " model = YourModule()\n", + " opt_model = torch.compile(model)\n", + " ```\n", + "\n", + "</details>" + ] + }, + { + "cell_type": "markdown", + "id": "bd4066a6-3f24-4b63-b2be-da0350ec6145", + "metadata": {}, + "source": [ + "Remember, while torch.compile optimizes performance, the underlying logic remains the same. Ensure to test and validate your compiled model's outputs against the original to confirm consistent behavior." + ] + }, + { + "cell_type": "markdown", + "id": "4340d5df", + "metadata": {}, + "source": [ + "---\n", + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "pytorch-gpu-2.0.1_py3.10.12", + "language": "python", + "name": "module-conda-env-pytorch-gpu-2.0.1_py3.10.12" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Misc/Using-Tensorboard.ipynb b/Misc/04-Using-Tensorboard.ipynb similarity index 93% rename from Misc/Using-Tensorboard.ipynb rename to Misc/04-Using-Tensorboard.ipynb index a0bbf29..f6ef193 100644 --- a/Misc/Using-Tensorboard.ipynb +++ b/Misc/04-Using-Tensorboard.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [TSB1] - Tensorboard with/from Jupyter \n", "<!-- DESC --> 4 ways to use Tensorboard from the Jupyter environment\n", @@ -62,7 +62,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Misc/Scratchbook.ipynb b/Misc/99-Scratchbook.ipynb similarity index 97% rename from Misc/Scratchbook.ipynb rename to Misc/99-Scratchbook.ipynb index cbe135d..9e61e8e 100644 --- a/Misc/Scratchbook.ipynb +++ b/Misc/99-Scratchbook.ipynb @@ -5,7 +5,7 @@ "id": "alpha-bahrain", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SCRATCH1] - Scratchbook\n", "<!-- DESC --> A scratchbook for small examples\n", @@ -294,7 +294,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Optimization/01-Apprentissages-rapides-et-Optimisations.ipynb b/Optimization/01-Apprentissages-rapides-et-Optimisations.ipynb index 5fdd218..a91cd16 100755 --- a/Optimization/01-Apprentissages-rapides-et-Optimisations.ipynb +++ 
b/Optimization/01-Apprentissages-rapides-et-Optimisations.ipynb @@ -7,7 +7,7 @@ "id": "EBL97zOSNOUb" }, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [OPT1] - Training setup optimization\n", "<!-- DESC --> The goal of this notebook is to go through a typical deep learning model training\n", diff --git a/IRIS/01-Simple-Perceptron.ipynb b/Perceptron/01-Simple-Perceptron.ipynb similarity index 97% rename from IRIS/01-Simple-Perceptron.ipynb rename to Perceptron/01-Simple-Perceptron.ipynb index aacbfe2..aab34c2 100644 --- a/IRIS/01-Simple-Perceptron.ipynb +++ b/Perceptron/01-Simple-Perceptron.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [PER57] - Perceptron Model 1957\n", "<!-- DESC --> Example of use of a Perceptron, with sklearn and IRIS dataset of 1936 !\n", @@ -206,7 +206,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/README.ipynb b/README.ipynb index 2ed8a2b..18f0ab1 100644 --- a/README.ipynb +++ b/README.ipynb @@ -3,13 +3,13 @@ { "cell_type": "code", "execution_count": 1, - "id": "c36f1d62", + "id": "c7f25450", "metadata": { "execution": { - "iopub.execute_input": "2023-04-12T07:28:58.304935Z", - "iopub.status.busy": "2023-04-12T07:28:58.304172Z", - "iopub.status.idle": "2023-04-12T07:28:58.313981Z", - "shell.execute_reply": "2023-04-12T07:28:58.313154Z" + "iopub.execute_input": "2023-10-31T17:24:12.865964Z", + "iopub.status.busy": "2023-10-31T17:24:12.865178Z", + "iopub.status.idle": "2023-10-31T17:24:12.875759Z", + "shell.execute_reply": "2023-10-31T17:24:12.874784Z" }, "jupyter": { "source_hidden": true @@ -21,7 +21,7 @@ "text/markdown": [ "<a name=\"top\"></a>\n", "\n", - "[<img width=\"600px\" src=\"fidle/img/00-Fidle-titre-01.svg\"></img>](#top)\n", + "[<img width=\"600px\" src=\"fidle/img/title.svg\"></img>](#top)\n", "\n", "<!-- --------------------------------------------------- -->\n", "<!-- To correctly view this README under Jupyter Lab -->\n", @@ -44,7 +44,7 @@ "For more information, see **https://fidle.cnrs.fr** :\n", "- **[Fidle site](https://fidle.cnrs.fr)**\n", "- **[Presentation of the training](https://fidle.cnrs.fr/presentation)**\n", - "- **[Program 2022/2023](https://fidle.cnrs.fr/programme)**\n", + "- **[Detailed program](https://fidle.cnrs.fr/programme)**\n", "- [Subscribe to the list](https://fidle.cnrs.fr/listeinfo), to stay informed !\n", "- [Find us on youtube](https://fidle.cnrs.fr/youtube)\n", "- [Corrected notebooks](https://fidle.cnrs.fr/done)\n", @@ -52,14 +52,14 @@ "For more information, you can contact us at : \n", "[<img width=\"200px\" style=\"vertical-align:middle\" src=\"fidle/img/00-Mail_contact.svg\"></img>](#top)\n", "\n", - "Current Version : <!-- VERSION_BEGIN -->2.2.4<!-- VERSION_END -->\n", + "Current Version : <!-- VERSION_BEGIN -->2.4.0<!-- VERSION_END -->\n", "\n", "\n", "## Course materials\n", "\n", "| | | | |\n", "|:--:|:--:|:--:|:--:|\n", - "| **[<img width=\"50px\" src=\"fidle/img/00-Fidle-pdf.svg\"></img><br>Course slides](https://fidle.cnrs.fr/supports)**<br>The course in pdf format<br>(12 Mo)| **[<img width=\"50px\" 
src=\"fidle/img/00-Notebooks.svg\"></img><br>Notebooks](https://fidle.cnrs.fr/notebooks)**<br> Get a Zip or clone this repository <br>(40 Mo)| **[<img width=\"50px\" src=\"fidle/img/00-Datasets-tar.svg\"></img><br>Datasets](https://fidle.cnrs.fr/datasets-fidle.tar)**<br>All the needed datasets<br>(1.2 Go)|**[<img width=\"50px\" src=\"fidle/img/00-Videos.svg\"></img><br>Videos](https://fidle.cnrs.fr/youtube)**<br> Our Youtube channel <br> |\n", + "| **[<img width=\"50px\" src=\"fidle/img/00-Fidle-pdf.svg\"></img><br>Course slides](https://fidle.cnrs.fr/supports)**<br>The course in pdf format<br>| **[<img width=\"50px\" src=\"fidle/img/00-Notebooks.svg\"></img><br>Notebooks](https://fidle.cnrs.fr/notebooks)**<br> Get a Zip or clone this repository <br>| **[<img width=\"50px\" src=\"fidle/img/00-Datasets-tar.svg\"></img><br>Datasets](https://fidle.cnrs.fr/datasets-fidle.tar)**<br>All the needed datasets<br>|**[<img width=\"50px\" src=\"fidle/img/00-Videos.svg\"></img><br>Videos](https://fidle.cnrs.fr/youtube)**<br> Our Youtube channel <br> |\n", "\n", "Have a look about **[How to get and install](https://fidle.cnrs.fr/installation)** these notebooks and datasets.\n", "\n", @@ -67,7 +67,7 @@ "## Jupyter notebooks\n", "\n", "<!-- TOC_BEGIN -->\n", - "<!-- Automatically generated on : 12/04/23 09:28:57 -->\n", + "<!-- Automatically generated on : 31/10/23 18:24:11 -->\n", "\n", "### Linear and logistic regression\n", "- **[LINR1](LinearReg/01-Linear-Regression.ipynb)** - [Linear regression with direct resolution](LinearReg/01-Linear-Regression.ipynb) \n", @@ -80,21 +80,41 @@ "Simple example of logistic regression with a sklearn solution\n", "\n", "### Perceptron Model 1957\n", - "- **[PER57](IRIS/01-Simple-Perceptron.ipynb)** - [Perceptron Model 1957](IRIS/01-Simple-Perceptron.ipynb) \n", + "- **[PER57](Perceptron/01-Simple-Perceptron.ipynb)** - [Perceptron Model 1957](Perceptron/01-Simple-Perceptron.ipynb) \n", "Example of use of a Perceptron, with sklearn and IRIS dataset of 1936 !\n", "\n", - "### Basic regression using DN\n", - "- **[BHPD1](BHPD/01-DNN-Regression.ipynb)** - [Regression with a Dense Network (DNN)](BHPD/01-DNN-Regression.ipynb) \n", + "### BHPD regression (DNN), using Keras\n", + "- **[KBHPD1](BHPD.Keras/01-DNN-Regression.ipynb)** - [Regression with a Dense Network (DNN)](BHPD.Keras/01-DNN-Regression.ipynb) \n", "Simple example of a regression with the dataset Boston Housing Prices Dataset (BHPD)\n", - "- **[BHPD2](BHPD/02-DNN-Regression-Premium.ipynb)** - [Regression with a Dense Network (DNN) - Advanced code](BHPD/02-DNN-Regression-Premium.ipynb) \n", + "- **[KBHPD2](BHPD.Keras/02-DNN-Regression-Premium.ipynb)** - [Regression with a Dense Network (DNN) - Advanced code](BHPD.Keras/02-DNN-Regression-Premium.ipynb) \n", "A more advanced implementation of the precedent example\n", - "- **[WINE1](BHPD/03-DNN-Wine-Regression.ipynb)** - [Wine quality prediction with a Dense Network (DNN)](BHPD/03-DNN-Wine-Regression.ipynb) \n", + "\n", + "### BHPD regression (DNN), using PyTorch\n", + "- **[PBHPD1](BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb)** - [Regression with a Dense Network (DNN)](BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb) \n", + "A Simple regression with a Dense Neural Network (DNN) using Pytorch - BHPD dataset\n", + "\n", + "### Wine Quality prediction (DNN), using Keras\n", + "- **[KWINE1](Wine.Keras/01-DNN-Wine-Regression.ipynb)** - [Wine quality prediction with a Dense Network (DNN)](Wine.Keras/01-DNN-Wine-Regression.ipynb) \n", + "Another example of regression, with 
a wine quality prediction!\n", + "\n", + "### Wine Quality prediction (DNN), using PyTorch\n", + "- **[LWINE1](Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb)** - [Wine quality prediction with a Dense Network (DNN) using Lightning](Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb) \n", "Another example of regression, with a wine quality prediction!\n", "\n", - "### Basic classification using a DN\n", - "- **[MNIST1](MNIST/01-DNN-MNIST.ipynb)** - [Simple classification with DNN](MNIST/01-DNN-MNIST.ipynb) \n", + "### MNIST classification (DNN,CNN), using Keras\n", + "- **[KMNIST1](MNIST.Keras/01-DNN-MNIST.ipynb)** - [Simple classification with DNN](MNIST.Keras/01-DNN-MNIST.ipynb) \n", + "An example of classification using a dense neural network for the famous MNIST dataset\n", + "- **[KMNIST2](MNIST.Keras/02-CNN-MNIST.ipynb)** - [Simple classification with CNN](MNIST.Keras/02-CNN-MNIST.ipynb) \n", + "An example of classification using a convolutional neural network for the famous MNIST dataset\n", + "\n", + "### MNIST classification (DNN,CNN), using PyTorch\n", + "- **[PMNIST1](MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb)** - [Simple classification with DNN](MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb) \n", + "Example of classification with a fully connected neural network, using Pytorch\n", + "\n", + "### MNIST classification (DNN,CNN), using Lightning\n", + "- **[LMNIST1](MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb)** - [Simple classification with DNN using Pytorch Lightning](MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb) \n", "An example of classification using a dense neural network for the famous MNIST dataset\n", - "- **[MNIST2](MNIST/02-CNN-MNIST.ipynb)** - [Simple classification with CNN](MNIST/02-CNN-MNIST.ipynb) \n", + "- **[LMNIST2](MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb)** - [Simple classification with CNN using Pytorch Lightning](MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb) \n", "An example of classification using a convolutional neural network for the famous MNIST dataset\n", "\n", "### Images classification with Convolutional Neural Networks (CNN)\n", @@ -117,7 +137,7 @@ "- **[GTSRB11](GTSRB/batch_slurm.sh)** - [SLURM batch script](GTSRB/batch_slurm.sh) \n", "Bash script for a Slurm batch submission of an ipython code\n", "\n", - "### Sentiment analysis with word embeddin\n", + "### Sentiment analysis with word embedding\n", "- **[IMDB1](IMDB/01-One-hot-encoding.ipynb)** - [Sentiment analysis with hot-one encoding](IMDB/01-One-hot-encoding.ipynb) \n", "A basic example of sentiment analysis with sparse encoding, using a dataset from Internet Movie Database (IMDB)\n", "- **[IMDB2](IMDB/02-Keras-embedding.ipynb)** - [Sentiment analysis with text embedding](IMDB/02-Keras-embedding.ipynb) \n", @@ -187,17 +207,19 @@ "- **[DRL2](DRL/FIDLE_rl_baselines_zoo.ipynb)** - [RL Baselines3 Zoo: Training in Colab](DRL/FIDLE_rl_baselines_zoo.ipynb) \n", "Demo of Stable baseline3 with Colab\n", "\n", - "### Miscellaneous\n", - "- **[ACTF1](Misc/Activation-Functions.ipynb)** - [Activation functions](Misc/Activation-Functions.ipynb) \n", - "Some activation functions, with their derivatives.\n", - "- **[NP1](Misc/Numpy.ipynb)** - [A short introduction to Numpy](Misc/Numpy.ipynb) \n", + "### Miscellaneous things, but very important!\n", + "- **[NP1](Misc/00-Numpy.ipynb)** - [A short introduction to Numpy](Misc/00-Numpy.ipynb) \n", "Numpy is an essential tool for the Scientific Python.\n", - "- **[SCRATCH1](Misc/Scratchbook.ipynb)** - [Scratchbook](Misc/Scratchbook.ipynb) \n", - "A 
scratchbook for small examples\n", - "- **[TSB1](Misc/Using-Tensorboard.ipynb)** - [Tensorboard with/from Jupyter ](Misc/Using-Tensorboard.ipynb) \n", - "4 ways to use Tensorboard from the Jupyter environment\n", - "- **[PANDAS1](Misc/Using-pandas.ipynb)** - [Quelques exemples avec Pandas](Misc/Using-pandas.ipynb) \n", + "- **[ACTF1](Misc/01-Activation-Functions.ipynb)** - [Activation functions](Misc/01-Activation-Functions.ipynb) \n", + "Some activation functions, with their derivatives.\n", + "- **[PANDAS1](Misc/02-Using-pandas.ipynb)** - [Quelques exemples avec Pandas](Misc/02-Using-pandas.ipynb) \n", "pandas is another essential tool for the Scientific Python.\n", + "- **[PYTORCH1](Misc/03-Using-Pytorch.ipynb)** - [Practical Lab : PyTorch](Misc/03-Using-Pytorch.ipynb) \n", + "PyTorch est l'un des principaux framework utilisé dans le Deep Learning\n", + "- **[TSB1](Misc/04-Using-Tensorboard.ipynb)** - [Tensorboard with/from Jupyter ](Misc/04-Using-Tensorboard.ipynb) \n", + "4 ways to use Tensorboard from the Jupyter environment\n", + "- **[SCRATCH1](Misc/99-Scratchbook.ipynb)** - [Scratchbook](Misc/99-Scratchbook.ipynb) \n", + "A scratchbook for small examples\n", "<!-- TOC_END -->\n", "\n", "\n", @@ -215,7 +237,7 @@ "\n", "\n", "----\n", - "[<img width=\"80px\" src=\"fidle/img/00-Fidle-logo-01.svg\"></img>](#top)\n" + "[<img width=\"80px\" src=\"fidle/img/logo-paysage.svg\"></img>](#top)\n" ], "text/plain": [ "<IPython.core.display.Markdown object>" @@ -229,7 +251,7 @@ "from IPython.display import display,Markdown\n", "display(Markdown(open('README.md', 'r').read()))\n", "#\n", - "# This README is visible under Jupiter Lab ;-)# Automatically generated on : 12/04/23 09:28:57" + "# This README is visible under Jupiter Lab ;-)# Automatically generated on : 31/10/23 18:24:11" ] } ], diff --git a/README.md b/README.md index 48da9b1..0973027 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ <a name="top"></a> -[<img width="600px" src="fidle/img/00-Fidle-titre-01.svg"></img>](#top) +[<img width="600px" src="fidle/img/title.svg"></img>](#top) <!-- --------------------------------------------------- --> <!-- To correctly view this README under Jupyter Lab --> @@ -23,7 +23,7 @@ The objectives of this training are : For more information, see **https://fidle.cnrs.fr** : - **[Fidle site](https://fidle.cnrs.fr)** - **[Presentation of the training](https://fidle.cnrs.fr/presentation)** -- **[Program 2022/2023](https://fidle.cnrs.fr/programme)** +- **[Detailed program](https://fidle.cnrs.fr/programme)** - [Subscribe to the list](https://fidle.cnrs.fr/listeinfo), to stay informed ! 
- [Find us on youtube](https://fidle.cnrs.fr/youtube) - [Corrected notebooks](https://fidle.cnrs.fr/done) @@ -31,14 +31,14 @@ For more information, see **https://fidle.cnrs.fr** : For more information, you can contact us at : [<img width="200px" style="vertical-align:middle" src="fidle/img/00-Mail_contact.svg"></img>](#top) -Current Version : <!-- VERSION_BEGIN -->2.2.4<!-- VERSION_END --> +Current Version : <!-- VERSION_BEGIN -->2.4.0<!-- VERSION_END --> ## Course materials | | | | | |:--:|:--:|:--:|:--:| -| **[<img width="50px" src="fidle/img/00-Fidle-pdf.svg"></img><br>Course slides](https://fidle.cnrs.fr/supports)**<br>The course in pdf format<br>(12 Mo)| **[<img width="50px" src="fidle/img/00-Notebooks.svg"></img><br>Notebooks](https://fidle.cnrs.fr/notebooks)**<br> Get a Zip or clone this repository <br>(40 Mo)| **[<img width="50px" src="fidle/img/00-Datasets-tar.svg"></img><br>Datasets](https://fidle.cnrs.fr/datasets-fidle.tar)**<br>All the needed datasets<br>(1.2 Go)|**[<img width="50px" src="fidle/img/00-Videos.svg"></img><br>Videos](https://fidle.cnrs.fr/youtube)**<br> Our Youtube channel <br> | +| **[<img width="50px" src="fidle/img/00-Fidle-pdf.svg"></img><br>Course slides](https://fidle.cnrs.fr/supports)**<br>The course in pdf format<br>| **[<img width="50px" src="fidle/img/00-Notebooks.svg"></img><br>Notebooks](https://fidle.cnrs.fr/notebooks)**<br> Get a Zip or clone this repository <br>| **[<img width="50px" src="fidle/img/00-Datasets-tar.svg"></img><br>Datasets](https://fidle.cnrs.fr/datasets-fidle.tar)**<br>All the needed datasets<br>|**[<img width="50px" src="fidle/img/00-Videos.svg"></img><br>Videos](https://fidle.cnrs.fr/youtube)**<br> Our Youtube channel <br> | Have a look about **[How to get and install](https://fidle.cnrs.fr/installation)** these notebooks and datasets. @@ -46,7 +46,7 @@ Have a look about **[How to get and install](https://fidle.cnrs.fr/installation) ## Jupyter notebooks <!-- TOC_BEGIN --> -<!-- Automatically generated on : 12/04/23 09:28:57 --> +<!-- Automatically generated on : 31/10/23 18:24:11 --> ### Linear and logistic regression - **[LINR1](LinearReg/01-Linear-Regression.ipynb)** - [Linear regression with direct resolution](LinearReg/01-Linear-Regression.ipynb) @@ -59,21 +59,41 @@ Illustration of the problem of complexity with the polynomial regression Simple example of logistic regression with a sklearn solution ### Perceptron Model 1957 -- **[PER57](IRIS/01-Simple-Perceptron.ipynb)** - [Perceptron Model 1957](IRIS/01-Simple-Perceptron.ipynb) +- **[PER57](Perceptron/01-Simple-Perceptron.ipynb)** - [Perceptron Model 1957](Perceptron/01-Simple-Perceptron.ipynb) Example of use of a Perceptron, with sklearn and IRIS dataset of 1936 ! 
-### Basic regression using DN -- **[BHPD1](BHPD/01-DNN-Regression.ipynb)** - [Regression with a Dense Network (DNN)](BHPD/01-DNN-Regression.ipynb) +### BHPD regression (DNN), using Keras +- **[KBHPD1](BHPD.Keras/01-DNN-Regression.ipynb)** - [Regression with a Dense Network (DNN)](BHPD.Keras/01-DNN-Regression.ipynb) Simple example of a regression with the dataset Boston Housing Prices Dataset (BHPD) -- **[BHPD2](BHPD/02-DNN-Regression-Premium.ipynb)** - [Regression with a Dense Network (DNN) - Advanced code](BHPD/02-DNN-Regression-Premium.ipynb) +- **[KBHPD2](BHPD.Keras/02-DNN-Regression-Premium.ipynb)** - [Regression with a Dense Network (DNN) - Advanced code](BHPD.Keras/02-DNN-Regression-Premium.ipynb) A more advanced implementation of the precedent example -- **[WINE1](BHPD/03-DNN-Wine-Regression.ipynb)** - [Wine quality prediction with a Dense Network (DNN)](BHPD/03-DNN-Wine-Regression.ipynb) + +### BHPD regression (DNN), using PyTorch +- **[PBHPD1](BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb)** - [Regression with a Dense Network (DNN)](BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb) +A Simple regression with a Dense Neural Network (DNN) using Pytorch - BHPD dataset + +### Wine Quality prediction (DNN), using Keras +- **[KWINE1](Wine.Keras/01-DNN-Wine-Regression.ipynb)** - [Wine quality prediction with a Dense Network (DNN)](Wine.Keras/01-DNN-Wine-Regression.ipynb) +Another example of regression, with a wine quality prediction! + +### Wine Quality prediction (DNN), using PyTorch +- **[LWINE1](Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb)** - [Wine quality prediction with a Dense Network (DNN) using Lightning](Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb) Another example of regression, with a wine quality prediction! -### Basic classification using a DN -- **[MNIST1](MNIST/01-DNN-MNIST.ipynb)** - [Simple classification with DNN](MNIST/01-DNN-MNIST.ipynb) +### MNIST classification (DNN,CNN), using Keras +- **[KMNIST1](MNIST.Keras/01-DNN-MNIST.ipynb)** - [Simple classification with DNN](MNIST.Keras/01-DNN-MNIST.ipynb) +An example of classification using a dense neural network for the famous MNIST dataset +- **[KMNIST2](MNIST.Keras/02-CNN-MNIST.ipynb)** - [Simple classification with CNN](MNIST.Keras/02-CNN-MNIST.ipynb) +An example of classification using a convolutional neural network for the famous MNIST dataset + +### MNIST classification (DNN,CNN), using PyTorch +- **[PMNIST1](MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb)** - [Simple classification with DNN](MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb) +Example of classification with a fully connected neural network, using Pytorch + +### MNIST classification (DNN,CNN), using Lightning +- **[LMNIST1](MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb)** - [Simple classification with DNN using Pytorch Lightning](MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb) An example of classification using a dense neural network for the famous MNIST dataset -- **[MNIST2](MNIST/02-CNN-MNIST.ipynb)** - [Simple classification with CNN](MNIST/02-CNN-MNIST.ipynb) +- **[LMNIST2](MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb)** - [Simple classification with CNN using Pytorch Lightning](MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb) An example of classification using a convolutional neural network for the famous MNIST dataset ### Images classification with Convolutional Neural Networks (CNN) @@ -96,7 +116,7 @@ Bash script for an OAR batch submission of an ipython code - **[GTSRB11](GTSRB/batch_slurm.sh)** - [SLURM batch script](GTSRB/batch_slurm.sh) Bash 
script for a Slurm batch submission of an ipython code -### Sentiment analysis with word embeddin +### Sentiment analysis with word embedding - **[IMDB1](IMDB/01-One-hot-encoding.ipynb)** - [Sentiment analysis with hot-one encoding](IMDB/01-One-hot-encoding.ipynb) A basic example of sentiment analysis with sparse encoding, using a dataset from Internet Movie Database (IMDB) - **[IMDB2](IMDB/02-Keras-embedding.ipynb)** - [Sentiment analysis with text embedding](IMDB/02-Keras-embedding.ipynb) @@ -166,17 +186,19 @@ Using a a Deep Q-Network to play CartPole - an inverted pendulum problem (PyTorc - **[DRL2](DRL/FIDLE_rl_baselines_zoo.ipynb)** - [RL Baselines3 Zoo: Training in Colab](DRL/FIDLE_rl_baselines_zoo.ipynb) Demo of Stable baseline3 with Colab -### Miscellaneous -- **[ACTF1](Misc/Activation-Functions.ipynb)** - [Activation functions](Misc/Activation-Functions.ipynb) -Some activation functions, with their derivatives. -- **[NP1](Misc/Numpy.ipynb)** - [A short introduction to Numpy](Misc/Numpy.ipynb) +### Miscellaneous things, but very important! +- **[NP1](Misc/00-Numpy.ipynb)** - [A short introduction to Numpy](Misc/00-Numpy.ipynb) Numpy is an essential tool for the Scientific Python. -- **[SCRATCH1](Misc/Scratchbook.ipynb)** - [Scratchbook](Misc/Scratchbook.ipynb) -A scratchbook for small examples -- **[TSB1](Misc/Using-Tensorboard.ipynb)** - [Tensorboard with/from Jupyter ](Misc/Using-Tensorboard.ipynb) -4 ways to use Tensorboard from the Jupyter environment -- **[PANDAS1](Misc/Using-pandas.ipynb)** - [Quelques exemples avec Pandas](Misc/Using-pandas.ipynb) +- **[ACTF1](Misc/01-Activation-Functions.ipynb)** - [Activation functions](Misc/01-Activation-Functions.ipynb) +Some activation functions, with their derivatives. +- **[PANDAS1](Misc/02-Using-pandas.ipynb)** - [Quelques exemples avec Pandas](Misc/02-Using-pandas.ipynb) pandas is another essential tool for the Scientific Python. +- **[PYTORCH1](Misc/03-Using-Pytorch.ipynb)** - [Practical Lab : PyTorch](Misc/03-Using-Pytorch.ipynb) +PyTorch est l'un des principaux framework utilisé dans le Deep Learning +- **[TSB1](Misc/04-Using-Tensorboard.ipynb)** - [Tensorboard with/from Jupyter ](Misc/04-Using-Tensorboard.ipynb) +4 ways to use Tensorboard from the Jupyter environment +- **[SCRATCH1](Misc/99-Scratchbook.ipynb)** - [Scratchbook](Misc/99-Scratchbook.ipynb) +A scratchbook for small examples <!-- TOC_END --> @@ -194,4 +216,4 @@ See [Disclaimer](https://creativecommons.org/licenses/by-nc-sa/4.0/#). 
---- -[<img width="80px" src="fidle/img/00-Fidle-logo-01.svg"></img>](#top) +[<img width="80px" src="fidle/img/logo-paysage.svg"></img>](#top) diff --git a/SYNOP/LADYB1-Ladybug.ipynb b/SYNOP/LADYB1-Ladybug.ipynb index f26b1b3..82651e0 100644 --- a/SYNOP/LADYB1-Ladybug.ipynb +++ b/SYNOP/LADYB1-Ladybug.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [LADYB1] - Prediction of a 2D trajectory via RNN\n", "<!-- DESC --> Artificial dataset generation and prediction attempt via a recurrent network\n", @@ -465,7 +465,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/SYNOP/SYNOP1-Preparation-of-data.ipynb b/SYNOP/SYNOP1-Preparation-of-data.ipynb index d62fbbd..0d9e4dc 100644 --- a/SYNOP/SYNOP1-Preparation-of-data.ipynb +++ b/SYNOP/SYNOP1-Preparation-of-data.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SYNOP1] - Preparation of data\n", "<!-- DESC --> Episode 1 : Data analysis and preparation of a usuable meteorological dataset (SYNOP)\n", @@ -369,7 +369,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/SYNOP/SYNOP2-First-predictions.ipynb b/SYNOP/SYNOP2-First-predictions.ipynb index 38ca46a..3dc36b4 100644 --- a/SYNOP/SYNOP2-First-predictions.ipynb +++ b/SYNOP/SYNOP2-First-predictions.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SYNOP2] - First predictions at 3h\n", "<!-- DESC --> Episode 2 : RNN training session for weather prediction attempt at 3h\n", @@ -393,7 +393,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/SYNOP/SYNOP3-12h-predictions.ipynb b/SYNOP/SYNOP3-12h-predictions.ipynb index e543389..73d2788 100644 --- a/SYNOP/SYNOP3-12h-predictions.ipynb +++ b/SYNOP/SYNOP3-12h-predictions.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [SYNOP3] - 12h predictions\n", "<!-- DESC --> Episode 3: Attempt to predict in a more longer term \n", @@ -302,7 +302,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/Transformers/01-Distilbert.ipynb b/Transformers/01-Distilbert.ipynb index 7096f71..5b4bd24 100755 --- a/Transformers/01-Distilbert.ipynb +++ b/Transformers/01-Distilbert.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" 
src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [TRANS1] - IMDB, Sentiment analysis with Transformers \n", "<!-- DESC --> Using a Tranformer to perform a sentiment analysis (IMDB) - Jean Zay version\n", diff --git a/Transformers/02-distilbert_colab.ipynb b/Transformers/02-distilbert_colab.ipynb index ae735f0..f3f8d0f 100755 --- a/Transformers/02-distilbert_colab.ipynb +++ b/Transformers/02-distilbert_colab.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [TRANS2] - IMDB, Sentiment analysis with Transformers \n", "<!-- DESC --> Using a Tranformer to perform a sentiment analysis (IMDB) - Colab version\n", diff --git a/VAE/01-VAE-with-MNIST.ipynb b/VAE/01-VAE-with-MNIST.ipynb index 8b48f7a..a918312 100644 --- a/VAE/01-VAE-with-MNIST.ipynb +++ b/VAE/01-VAE-with-MNIST.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [VAE1] - First VAE, using functional API (MNIST dataset)\n", "<!-- DESC --> Construction and training of a VAE, using functional APPI, with a latent space of small dimension.\n", @@ -377,7 +377,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/VAE/02-VAE-with-MNIST.ipynb b/VAE/02-VAE-with-MNIST.ipynb index d0db25e..59b15a2 100644 --- a/VAE/02-VAE-with-MNIST.ipynb +++ b/VAE/02-VAE-with-MNIST.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [VAE2] - VAE, using a custom model class (MNIST dataset)\n", "<!-- DESC --> Construction and training of a VAE, using model subclass, with a latent space of small dimension.\n", @@ -472,7 +472,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/VAE/03-VAE-with-MNIST-post.ipynb b/VAE/03-VAE-with-MNIST-post.ipynb index 631679f..93803c7 100644 --- a/VAE/03-VAE-with-MNIST-post.ipynb +++ b/VAE/03-VAE-with-MNIST-post.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", "# <!-- TITLE --> [VAE3] - Analysis of the VAE's latent space of MNIST dataset\n", "<!-- DESC --> Visualization and analysis of the VAE's latent space of the dataset MNIST\n", @@ -306,7 +306,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/BHPD/03-DNN-Wine-Regression.ipynb b/Wine.Keras/01-DNN-Wine-Regression.ipynb similarity index 98% rename from BHPD/03-DNN-Wine-Regression.ipynb rename to Wine.Keras/01-DNN-Wine-Regression.ipynb index ecb36fa..cfcd1e1 100644 --- 
a/BHPD/03-DNN-Wine-Regression.ipynb +++ b/Wine.Keras/01-DNN-Wine-Regression.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [WINE1] - Wine quality prediction with a Dense Network (DNN)\n", + "# <!-- TITLE --> [KWINE1] - Wine quality prediction with a Dense Network (DNN)\n", " <!-- DESC --> Another example of regression, with a wine quality prediction!\n", " <!-- AUTHOR : Jean-Luc Parouty (CNRS/SIMaP) -->\n", "\n", @@ -449,7 +449,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], diff --git a/WineQuality-DNN_Reg-lightning/03-DNN-Wine-Regression-lightning.ipynb b/Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb similarity index 97% rename from WineQuality-DNN_Reg-lightning/03-DNN-Wine-Regression-lightning.ipynb rename to Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb index 5a45fc1..dff8bee 100644 --- a/WineQuality-DNN_Reg-lightning/03-DNN-Wine-Regression-lightning.ipynb +++ b/Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb @@ -4,9 +4,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "<img width=\"800px\" src=\"../fidle/img/00-Fidle-header-01.svg\"></img>\n", + "<img width=\"800px\" src=\"../fidle/img/header.svg\"></img>\n", "\n", - "# <!-- TITLE --> [WINE1] - Wine quality prediction with a Dense Network (DNN) using Lightning\n", + "# <!-- TITLE --> [LWINE1] - Wine quality prediction with a Dense Network (DNN) using Lightning\n", " <!-- DESC --> Another example of regression, with a wine quality prediction!\n", " <!-- AUTHOR : Achille Mbogol Touye (EFFILIA-MIAI/SIMaP) -->\n", "\n", @@ -76,7 +76,7 @@ "from IPython.display import Markdown\n", "from importlib import reload\n", "from torch.utils.data import Dataset, DataLoader, random_split\n", - "from data_load import WineQualityDataset, Normalize, ToTensor\n", + "from modules.data_load import WineQualityDataset, Normalize, ToTensor\n", "from lightning.pytorch.loggers.tensorboard import TensorBoardLogger\n", "from torchmetrics.functional.regression import mean_absolute_error, mean_squared_error\n", "\n", @@ -541,7 +541,7 @@ "metadata": {}, "source": [ "---\n", - "<img width=\"80px\" src=\"../fidle/img/00-Fidle-logo-01.svg\"></img>" + "<img width=\"80px\" src=\"../fidle/img/logo-paysage.svg\"></img>" ] } ], @@ -561,7 +561,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.9.2" }, "vscode": { "interpreter": { diff --git a/WineQuality-DNN_Reg-lightning/data_load.py b/Wine.Lightning/modules/data_load.py similarity index 100% rename from WineQuality-DNN_Reg-lightning/data_load.py rename to Wine.Lightning/modules/data_load.py diff --git a/WineQuality-DNN_Reg-lightning/.gitkeep b/WineQuality-DNN_Reg-lightning/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/fidle/about.yml b/fidle/about.yml index e5112a4..32032a3 100644 --- a/fidle/about.yml +++ b/fidle/about.yml @@ -9,11 +9,11 @@ # Formation Introduction au Deep Learning - 2022 #-------------------------------------------------------------------- # Formation Introduction au Deep Learning https://fidle.cnrs.fr -# By MIAI/CNRS/UGA 2022 +# By MIAI/CNRS/UGA 2023/24 # # This file describes the notebooks used by the Fidle training. 
-version: 2.2.4 +version: 2.4.0 content: notebooks name: Notebooks Fidle description: All notebooks used by the Fidle training @@ -24,18 +24,23 @@ readme_ipynb: README.ipynb default_ci: fidle/ci/default.yml toc: - LinearReg: Linear and logistic regression - IRIS: Perceptron Model 1957 - BHPD: Basic regression using DN - MNIST: Basic classification using a DN - GTSRB: Images classification with Convolutional Neural Networks (CNN) - IMDB: Sentiment analysis with word embeddin - SYNOP: Time series with Recurrent Neural Network (RNN) - Transformers: Sentiment analysis with transformer - AE: Unsupervised learning with an autoencoder neural network (AE) - VAE: Generative network with Variational Autoencoder (VAE) - DCGAN: Generative Adversarial Networks (GANs) - DDPM: Diffusion Model (DDPM) - Optimization: Training optimization - DRL: Deep Reinforcement Learning (DRL) - Misc: Miscellaneous + LinearReg: Linear and logistic regression + Perceptron: Perceptron Model 1957 + BHPD.Keras: BHPD regression (DNN), using Keras + BHPD.PyTorch: BHPD regression (DNN), using PyTorch + Wine.Keras: Wine Quality prediction (DNN), using Keras + Wine.Lightning: Wine Quality prediction (DNN), using PyTorch + MNIST.Keras: MNIST classification (DNN,CNN), using Keras + MNIST.PyTorch: MNIST classification (DNN,CNN), using PyTorch + MNIST.Lightning: MNIST classification (DNN,CNN), using Lightning + GTSRB: Images classification with Convolutional Neural Networks (CNN) + IMDB: Sentiment analysis with word embedding + SYNOP: Time series with Recurrent Neural Network (RNN) + Transformers: Sentiment analysis with transformer + AE: Unsupervised learning with an autoencoder neural network (AE) + VAE: Generative network with Variational Autoencoder (VAE) + DCGAN: Generative Adversarial Networks (GANs) + DDPM: Diffusion Model (DDPM) + Optimization: Training optimization + DRL: Deep Reinforcement Learning (DRL) + Misc: Miscellaneous things, but very important! 
diff --git a/fidle/ci/default.yml b/fidle/ci/default.yml index b0d1499..372cef3 100644 --- a/fidle/ci/default.yml +++ b/fidle/ci/default.yml @@ -1,6 +1,6 @@ campain: version: '1.0' - description: Automatically generated ci profile (12/04/23 09:28:57) + description: Automatically generated ci profile (31/10/23 18:24:11) directory: ./campains/default existing_notebook: 'remove # remove|skip' report_template: 'fidle # fidle|default' @@ -19,40 +19,73 @@ LOGR1: notebook: LinearReg/04-Logistic-Regression.ipynb # -# ------------ IRIS +# ------------ Perceptron # PER57: - notebook: IRIS/01-Simple-Perceptron.ipynb + notebook: Perceptron/01-Simple-Perceptron.ipynb # -# ------------ BHPD +# ------------ BHPD.Keras # -BHPD1: - notebook: BHPD/01-DNN-Regression.ipynb +KBHPD1: + notebook: BHPD.Keras/01-DNN-Regression.ipynb overrides: fit_verbosity: default -BHPD2: - notebook: BHPD/02-DNN-Regression-Premium.ipynb +KBHPD2: + notebook: BHPD.Keras/02-DNN-Regression-Premium.ipynb overrides: fit_verbosity: default -WINE1: - notebook: BHPD/03-DNN-Wine-Regression.ipynb + +# +# ------------ BHPD.PyTorch +# +PBHPD1: + notebook: BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb + +# +# ------------ Wine.Keras +# +KWINE1: + notebook: Wine.Keras/01-DNN-Wine-Regression.ipynb + overrides: + fit_verbosity: default + dataset_name: default + +# +# ------------ Wine.Lightning +# +LWINE1: + notebook: Wine.Lightning/01-DNN-Wine-Regression-lightning.ipynb overrides: fit_verbosity: default dataset_name: default # -# ------------ MNIST +# ------------ MNIST.Keras # -MNIST1: - notebook: MNIST/01-DNN-MNIST.ipynb +KMNIST1: + notebook: MNIST.Keras/01-DNN-MNIST.ipynb overrides: fit_verbosity: default -MNIST2: - notebook: MNIST/02-CNN-MNIST.ipynb +KMNIST2: + notebook: MNIST.Keras/02-CNN-MNIST.ipynb overrides: fit_verbosity: default +# +# ------------ MNIST.PyTorch +# +PMNIST1: + notebook: MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb + +# +# ------------ MNIST.Lightning +# +LMNIST1: + notebook: MNIST.Lightning/01-DNN-MNIST_Lightning.ipynb +LMNIST2: + notebook: MNIST.Lightning/02-CNN-MNIST_Lightning.ipynb + # # ------------ GTSRB # @@ -321,14 +354,16 @@ DRL2: # # ------------ Misc # -ACTF1: - notebook: Misc/Activation-Functions.ipynb NP1: - notebook: Misc/Numpy.ipynb -SCRATCH1: - notebook: Misc/Scratchbook.ipynb + notebook: Misc/00-Numpy.ipynb +ACTF1: + notebook: Misc/01-Activation-Functions.ipynb +PANDAS1: + notebook: Misc/02-Using-pandas.ipynb +PYTORCH1: + notebook: Misc/03-Using-Pytorch.ipynb TSB1: - notebook: Misc/Using-Tensorboard.ipynb + notebook: Misc/04-Using-Tensorboard.ipynb overrides: ?? 
-PANDAS1: - notebook: Misc/Using-pandas.ipynb +SCRATCH1: + notebook: Misc/99-Scratchbook.ipynb diff --git a/fidle/img/00-Fidle-Anaconda.svg b/fidle/img/00-Fidle-Anaconda.svg deleted file mode 100644 index 470311b..0000000 --- a/fidle/img/00-Fidle-Anaconda.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 98.0859 23.1622"><title>00-Fidle-Anaconda</title><g id="Calque_2" data-name="Calque 2"><g id="Contains_1" data-name="Contains #1"><rect x="0.6697" y="0.6697" width="96.7465" height="19.2734" rx="2.2368" style="fill:none;stroke:#2cb34a;stroke-miterlimit:10;stroke-width:1.3393646861416477px"/><g id="Calque_4" data-name="Calque 4"><path d="M25.2956,5.966A.1966.1966,0,0,0,25.1,5.833h-.1148a.2067.2067,0,0,0-.1955.133l-3.5742,8.14a.2127.2127,0,0,0,.1954.3145h1a.3481.3481,0,0,0,.333-.2418l.5636-1.3063h3.4359l.5635,1.3063c.0808.1691.161.2418.3339.2418h1a.2125.2125,0,0,0,.195-.3145ZM23.87,11.53l1.1271-2.6611h.0341L26.1811,11.53Z" style="fill:#2cb34a"/><path d="M36.2518,5.9542H35.1944a.2243.2243,0,0,0-.2185.2295v4.9957h-.0114L30.1836,5.8328h-.2869a.2214.2214,0,0,0-.2186.2179v8.14a.2316.2316,0,0,0,.2186.2293h1.0452a.224.224,0,0,0,.2181-.2293V8.99h.0114l4.8048,5.5516h.2758a.2214.2214,0,0,0,.2181-.2179v-8.14a.232.232,0,0,0-.2181-.2295" style="fill:#2cb34a"/><path d="M41.415,5.966a.1953.1953,0,0,0-.195-.133h-.1155a.2066.2066,0,0,0-.195.133l-3.574,8.14a.2127.2127,0,0,0,.1953.3145h1a.3482.3482,0,0,0,.3329-.2418l.5636-1.3063h3.4359l.5635,1.3063c.0806.1691.161.2418.3334.2418h1a.2128.2128,0,0,0,.1955-.3145ZM39.99,11.53l1.1271-2.6611h.0342L42.301,11.53Z" style="fill:#2cb34a"/><path d="M51.2375,12.2676a.2036.2036,0,0,0-.2872,0,2.5841,2.5841,0,0,1-4.3329-2.1039A2.6832,2.6832,0,0,1,49.16,7.3469l.02-.0009a2.6435,2.6435,0,0,1,1.77.7138.1784.1784,0,0,0,.25.0376.1811.1811,0,0,0,.0376-.0376l.7013-.7619a.2332.2332,0,0,0-.0024-.33L51.9273,6.96A3.7849,3.7849,0,0,0,49.146,5.8349a4.2523,4.2523,0,0,0-4.1491,4.353l0,.0133a4.2309,4.2309,0,0,0,4.1177,4.3412l.0311.0007A3.8412,3.8412,0,0,0,51.9388,13.37a.2388.2388,0,0,0,.011-.3265Z" style="fill:#2cb34a"/><path d="M56.6682,5.833a4.2335,4.2335,0,0,0-4.1265,4.3379l.0008.0285a4.1366,4.1366,0,1,0,8.2629,0,4.2429,4.2429,0,0,0-4.1161-4.3658l-.0211-.0006m0,7.1362a2.71,2.71,0,0,1-2.6319-2.77,2.6421,2.6421,0,1,1,5.2753,0,2.72,2.72,0,0,1-2.6434,2.77" style="fill:#2cb34a"/><path d="M68.6365,5.9542H67.5791a.224.224,0,0,0-.218.2295v4.9957h-.0119L62.5683,5.8328h-.2868a.2215.2215,0,0,0-.2187.2179v8.14a.2317.2317,0,0,0,.2187.2293h1.0453a.224.224,0,0,0,.2185-.2293V8.99h.0111l4.8048,5.5516h.2757a.2216.2216,0,0,0,.2185-.2179v-8.14a.2324.2324,0,0,0-.2185-.2295" style="fill:#2cb34a"/><path d="M73.2469,5.954H70.5231a.2225.2225,0,0,0-.2066.23v8.0065a.2227.2227,0,0,0,.2066.23h2.7238a4.1489,4.1489,0,0,0,4.034-4.2454,4.1427,4.1427,0,0,0-4.034-4.2213m-.1379,6.9913h-1.31V7.4175h1.31a2.6051,2.6051,0,0,1,2.5906,2.62q0,.0693-.0045.1383A2.6085,2.6085,0,0,1,73.2523,12.94q-.0716.0045-.1433.0049" style="fill:#2cb34a"/><path d="M84.9648,14.1062l-3.54-8.14a.1954.1954,0,0,0-.195-.133h-.1154a.2067.2067,0,0,0-.195.133l-3.5741,8.14a.2127.2127,0,0,0,.1953.3145h1a.3482.3482,0,0,0,.3329-.2418l.5635-1.3063h3.436l.5635,1.3063c.0806.1691.161.2418.3334.2418h1a.2129.2129,0,0,0,.1956-.3145M80.0006,11.53l1.1259-2.6611h.0346L82.3105,11.53Z" style="fill:#2cb34a"/><path d="M6.362,14.2845l.0013-.0547a11.1264,11.1264,0,0,1,.0983-1.283l.0054-.0391-.0353-.0133a9.7437,9.7437,0,0,1-1.0987-.5583l-.0449-.027-.0225.0475a10.3564,10.3564,0,0,0-.5824,1.5484l-.015.05.0483.0139A9.159,9.159,0,0,0,6.31,14.28Z" 
style="fill:#2cb34a"/><path d="M7.4609,6.7056l.0023-.013a8.9453,8.9453,0,0,0-.9237.0656q.0594.49.1642.9721a5.0894,5.0894,0,0,1,.7572-1.0247" style="fill:#2cb34a"/><path d="M6.362,14.6737v-.0453l-.0432-.0045a9.41,9.41,0,0,1-1.3628-.24l-.1191-.03.0674.1074a7.4327,7.4327,0,0,0,1.484,1.7151l.0939.0794-.0169-.1253a14.09,14.09,0,0,1-.1033-1.4573" style="fill:#2cb34a"/><path d="M8.5707,3.2473a6.933,6.933,0,0,0-1.5591.7974A9.5535,9.5535,0,0,1,8.09,4.3111a10.8416,10.8416,0,0,1,.4809-1.0638" style="fill:#2cb34a"/><path d="M10.87,2.8446a6.8181,6.8181,0,0,0-.8092.05,10.2828,10.2828,0,0,1,1.0773.9083l.2827.2742-.2779.28a9.7318,9.7318,0,0,0-.6662.7514l-.01.0122c-.0024.0026-.0423.0494-.1084.1343a4.604,4.604,0,0,1,.5117-.0287,4.9692,4.9692,0,0,1,4.8376,5.0912A4.9692,4.9692,0,0,1,10.87,15.4082a4.64,4.64,0,0,1-2.5288-.7514,9.4711,9.4711,0,0,1-1.0936.0646q-.2549,0-.51-.0139a13.87,13.87,0,0,0,.1576,1.8018,6.8191,6.8191,0,0,0,3.9745,1.28A7.2942,7.2942,0,0,0,17.97,10.316,7.2939,7.2939,0,0,0,10.87,2.8446" style="fill:#2cb34a"/><path d="M9.9285,4.61c.1413-.1764.2876-.3471.4366-.5111a9.4369,9.4369,0,0,0-1.052-.8153,10.0084,10.0084,0,0,0-.59,1.2456,9.8146,9.8146,0,0,1,.9373.4214c.1413-.1909.24-.3071.2681-.34" style="fill:#2cb34a"/><path d="M5.1087,8.8745l.0274.0462.0423-.0318a9.8369,9.8369,0,0,1,1.0281-.6811L6.24,8.189,6.2305,8.15a10.8127,10.8127,0,0,1-.24-1.3048l-.0063-.0531-.05.01a9.0877,9.0877,0,0,0-1.5439.44l-.0473.018.0187.0487a10.2861,10.2861,0,0,0,.7473,1.5659" style="fill:#2cb34a"/><path d="M5.0274,9.5992l-.0406.0342a9.7542,9.7542,0,0,0-1.0762,1.0808l-.0327.0376.0367.0342a9.609,9.609,0,0,0,1.1806.9267l.0426.0287.0252-.0468a10.59,10.59,0,0,1,.6261-1.0232l.0227-.033-.0261-.0307a10.4605,10.4605,0,0,1-.7292-.9631Z" style="fill:#2cb34a"/><path d="M7.8118,14.3106l.1234-.0073L7.84,14.2206a4.9675,4.9675,0,0,1-.9768-1.1429l-.003-.0142-.0789-.0363-.009.07a10.9551,10.9551,0,0,0-.0832,1.1646v.049l.046.0032q.2531.0141.5085.0139.2843,0,.5684-.0174" style="fill:#2cb34a"/><path d="M7.5751,6.0568a11.03,11.03,0,0,1,.2843-1.09,8.94,8.94,0,0,0-1.3625-.3005,10.3927,10.3927,0,0,0-.0148,1.4594,9.5772,9.5772,0,0,1,1.093-.0693" style="fill:#2cb34a"/><path d="M8.26,6.0314a4.7143,4.7143,0,0,1,.9941-.5134q-.3744-.1854-.764-.3361-.1332.42-.23.8495" style="fill:#2cb34a"/><path d="M4.9567,12.1009l-.0375-.0246a9.9908,9.9908,0,0,1-1.0548-.8023l-.095-.0826.0161.1285a7.8048,7.8048,0,0,0,.56,2.0744l.05.1168.0408-.1213a10.7684,10.7684,0,0,1,.5-1.2468Z" style="fill:#2cb34a"/><path d="M5.87,5.0119a7.481,7.481,0,0,0-1.102,1.484A9.67,9.67,0,0,1,5.883,6.2181,11.1319,11.1319,0,0,1,5.87,5.0119" style="fill:#2cb34a"/><path d="M6.08,10.25l.0046-.1291a5.2787,5.2787,0,0,1,.22-1.3318l.0376-.1253-.108.0655a9.7284,9.7284,0,0,0-.814.5544l-.0375.03.0268.0407c.1832.2745.3821.5431.5912.7984Z" style="fill:#2cb34a"/><path d="M6.1272,11.0214,6.11,10.8951l-.07.1039a10.1788,10.1788,0,0,0-.5472.9064L5.47,11.95l.0425.0245a9.24,9.24,0,0,0,.9285.4814l.1127.0513-.05-.118a5.24,5.24,0,0,1-.3757-1.3673" style="fill:#2cb34a"/><path d="M4.7458,9.2342,4.78,9.2058l-.0225-.0375a10.6568,10.6568,0,0,1-.6012-1.17l-.0511-.12-.0375.124a7.86,7.86,0,0,0-.3406,2.0982l-.0039.1321.0862-.094a10.266,10.266,0,0,1,.9368-.9041" style="fill:#2cb34a"/></g><path d="M83.4285,16.2586h2.7734V10.3963a.8925.8925,0,0,1,.8916-.8916h4.688a.8936.8936,0,0,1,.8916.8877L92.7,16.2586h2.57L89.5022,22.742Z" style="fill:#e12229"/><path 
d="M91.7814,9.7881a.6085.6085,0,0,1,.6084.6057l.0277,6.148h2.2208l-5.1413,5.78-5.4153-5.78h2.4034V10.3965a.6083.6083,0,0,1,.6084-.6084h4.6879m0-.5664H87.0935a1.1762,1.1762,0,0,0-1.1748,1.1748v5.5789H82.775l.8933.9536,5.4153,5.78.4242.4529.4124-.4637,5.1413-5.78.8385-.9429H92.9813l-.0251-5.584a1.1773,1.1773,0,0,0-1.1748-1.17Z" style="fill:#fff"/></g></g></svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-a-distance-01.svg b/fidle/img/00-Fidle-a-distance-01.svg deleted file mode 100755 index 3559819..0000000 --- a/fidle/img/00-Fidle-a-distance-01.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 151.0917 45.005"><path d="M47.9919,32.977a13.424,13.424,0,0,0,5.4811-1.8386,6.7273,6.7273,0,0,1,2.455-.96,15.2069,15.2069,0,0,0-5.9862-17.0857A17.7216,17.7216,0,0,0,37.6058,11.01c-4.3345.7164-8.8269,3.996-10.5862,5.4673C25.961,17.3639,13.369,29.3005,8.1168,26.9216c-3.5532-1.61,2.7909-7.4675-.1189-12.82a.2323.2323,0,0,0-.3874-.0258c-1.4813,1.9984-2.9293,4.3968-4.9019,3.32-.8812-.4812-1.6744-2.0178-2.2858-2.99A.23.23,0,0,0,0,14.5371,24.26,24.26,0,0,0,6.0983,29.3426c4.5289,5.4189,12.465,11.7291,25.2885,13.0059,5.5522.5529,18.7217-1.1976,23.9833-10.6647a13.2741,13.2741,0,0,0-1.2693.63,14.7716,14.7716,0,0,1-5.9875,1.9915c-.1831.0169-.3649.0245-.5466.0245a10.5714,10.5714,0,0,1-5.9687-2.3927,1.1184,1.1184,0,1,1,.8549-1.0851c0,.0183-.0044.0353-.0057.0535C43.53,31.7328,45.7847,33.1928,47.9919,32.977ZM31.2094,37.9323a20.3764,20.3764,0,0,1-4.7961.8712c-1.0832.0006-1.5335-.307-1.748-.768-.5643-1.2134,1.4687-2.9677,3.272-4.2263A.6668.6668,0,1,1,28.7,34.903a10.991,10.991,0,0,0-2.7544,2.5318c.3523.0761,1.4964.1245,4.9176-.7913a.6672.6672,0,0,1,.3459,1.2888Zm15.45-16.2541a2.5468,2.5468,0,0,1,2.4726,2.4538,1.6639,1.6639,0,1,0-1.4731,2.4317,1.7278,1.7278,0,0,0,.3088-.0308,2.37,2.37,0,0,1-1.3083.4025,2.6324,2.6324,0,0,1,0-5.2572ZM38.0706,4.6089a1.3336,1.3336,0,0,0,.524,1.8116c.6453.3553,2.0046-.4177,2.8292.7346.4284.5988-.8963-2.7147-1.5417-3.0708A1.3328,1.3328,0,0,0,38.0706,4.6089Zm6.7939.1428c-1.6619.9743-1.97,5.0031-1.5417,4.4043A7.584,7.584,0,0,1,46.152,7.0878a1.3337,1.3337,0,0,0-1.2875-2.3361ZM43.1787.31c-.85.9831-.2679,3.5325-.1157,3.0651a5.4212,5.4212,0,0,1,1.3687-1.926A.8741.8741,0,0,0,44.41.2135.8656.8656,0,0,0,43.1787.31Z" style="fill:#e12229"/><path d="M61.1535,29.2182H77.9224a2.222,2.222,0,0,0,2.2191-2.2224V16.0064a2.2242,2.2242,0,0,0-2.2191-2.2224H61.1535a2.222,2.222,0,0,0-2.2191,2.2224V26.9991A2.2234,2.2234,0,0,0,61.1535,29.2182Z" style="fill:#e12229"/><path d="M86.4244,26.81V16.1919a.4979.4979,0,0,0-.245-.4273h0a.4955.4955,0,0,0-.4935-.0033l-4.5144,2.56v6.3592l4.511,2.56a.4935.4935,0,0,0,.7419-.4306Z" style="fill:#e12229"/><path d="M97.3754,10.3981h8.3535v1.0034H98.58v6.7231h6.5732V19.128H98.58v8.1778H97.3754Z" style="fill:#808285"/><path d="M110.1683,10.3981V27.3058h-1.2041V10.3981Z" style="fill:#808285"/><path d="M114.2572,10.649a25.2147,25.2147,0,0,1,4.34-.3764c3.086,0,5.419.8281,6.8233,2.3081a8.0594,8.0594,0,0,1,2.1074,5.87,9.2468,9.2468,0,0,1-2.207,6.3965c-1.4805,1.6308-4.0391,2.584-7.3,2.584a36.8491,36.8491,0,0,1-3.7637-.15Zm1.2041,15.6533a20.8778,20.8778,0,0,0,2.7344.1255c5.4189,0,8.1025-3.0351,8.1025-7.9272.0508-4.29-2.333-7.2246-7.7763-7.2246a15.3662,15.3662,0,0,0-3.0606.2759Z" style="fill:#808285"/><path d="M130.4867,10.3981h1.2041V26.3023h7.5264v1.0035h-8.73Z" style="fill:#808285"/><path d="M149.6771,18.902h-6.748v7.4H150.48v1.0035H141.725V10.3981h8.3535v1.0034h-7.1494v6.497h6.748Z" style="fill:#808285"/><path 
d="M60.4984,43.8248a3.5916,3.5916,0,0,0,1.9185.5752,2.0111,2.0111,0,0,0,2.228-2.0508c0-1.0918-.5757-1.7558-1.8149-2.3315-1.24-.5161-2.3462-1.3428-2.3462-2.7442A2.5211,2.5211,0,0,1,63.1986,34.78a3.2758,3.2758,0,0,1,1.771.4424l-.2656.5757A2.8766,2.8766,0,0,0,63.14,35.37a1.7827,1.7827,0,0,0-1.977,1.771c0,1.1358.6343,1.667,1.9033,2.2574,1.4756.7231,2.2578,1.5048,2.2578,2.892a2.6758,2.6758,0,0,1-2.9512,2.7,3.9334,3.9334,0,0,1-2.1245-.6049Z" style="fill:#808285"/><path d="M67.0062,41.1837c0,2.4492,1.1656,3.2163,2.4195,3.2163a3.3738,3.3738,0,0,0,1.7119-.3686l.1621.5312a3.975,3.975,0,0,1-1.9624.4277c-1.9033,0-3.01-1.4755-3.01-3.5708,0-2.3315,1.1948-3.792,2.8623-3.792,2.0508,0,2.4644,2.0362,2.4644,3.128a3.6318,3.6318,0,0,1-.0152.4282Zm3.9395-.5464c.0293-1.21-.4868-2.4346-1.8443-2.4346-1.3427,0-1.9477,1.3282-2.0659,2.4346Z" style="fill:#808285"/><path d="M72.7318,43.9425A2.6116,2.6116,0,0,0,74.163,44.4c1.0034,0,1.52-.5752,1.52-1.3276,0-.7378-.3838-1.166-1.3281-1.6084-1.0474-.4873-1.6968-1.0772-1.6968-1.9624a1.9052,1.9052,0,0,1,2.0508-1.8741,2.5888,2.5888,0,0,1,1.4463.4278l-.28.56a2.0951,2.0951,0,0,0-1.2544-.3979,1.176,1.176,0,0,0-1.2837,1.18c0,.6933.4131,1.0034,1.2983,1.4458,1.0035.4575,1.7266,1.0332,1.7266,2.11A2.0192,2.0192,0,0,1,74.119,44.99a3.11,3.11,0,0,1-1.6528-.4721Z" style="fill:#808285"/><path d="M77.4691,43.9425A2.6116,2.6116,0,0,0,78.9,44.4c1.0034,0,1.52-.5752,1.52-1.3276,0-.7378-.3838-1.166-1.3281-1.6084-1.0474-.4873-1.6968-1.0772-1.6968-1.9624a1.9052,1.9052,0,0,1,2.0508-1.8741,2.5885,2.5885,0,0,1,1.4463.4278l-.28.56a2.0951,2.0951,0,0,0-1.2544-.3979,1.176,1.176,0,0,0-1.2837,1.18c0,.6933.4131,1.0034,1.2983,1.4458,1.0035.4575,1.7266,1.0332,1.7266,2.11A2.0192,2.0192,0,0,1,78.8563,44.99a3.11,3.11,0,0,1-1.6528-.4721Z" style="fill:#808285"/><path d="M82.8109,36.4029a.5516.5516,0,0,1-.5166-.59.565.565,0,0,1,.5459-.59.5928.5928,0,0,1-.0146,1.1806Zm-.3247,8.4546V37.775h.6787v7.0825Z" style="fill:#808285"/><path d="M87.4442,45.005c-1.5932,0-2.8916-1.313-2.8916-3.6445,0-2.4791,1.4312-3.733,2.9951-3.733,1.6675,0,2.9068,1.3428,2.9068,3.6446,0,2.6264-1.5786,3.7329-2.9951,3.7329Zm.0445-.5752c1.3872,0,2.2724-1.52,2.2724-3.1431,0-1.2392-.5757-3.084-2.2578-3.084-1.623,0-2.2573,1.712-2.2573,3.1285,0,1.5786.8554,3.0986,2.228,3.0986Z" style="fill:#808285"/><path d="M91.8275,39.4571c0-.7231-.0293-1.1362-.0591-1.6821h.6348l.0439,1.1655h.03a2.3426,2.3426,0,0,1,2.169-1.313c.708,0,2.2134.3985,2.2134,2.7886v4.4414h-.6788V40.5343c0-1.1953-.3984-2.3164-1.7265-2.3164a1.9956,1.9956,0,0,0-1.874,1.5786,2.7894,2.7894,0,0,0-.0738.6343v4.4267h-.6787Z" style="fill:#808285"/><path d="M105.389,43.2052a12.1551,12.1551,0,0,0,.0884,1.6523h-.62l-.0884-.9443h-.0444a2.2773,2.2773,0,0,1-1.9771,1.0918,1.8489,1.8489,0,0,1-1.9184-1.9478c0-1.6523,1.3872-2.582,3.8808-2.5674v-.2065c0-.8555-.1479-2.0806-1.6382-2.0654a2.6628,2.6628,0,0,0-1.5493.4721l-.1914-.5019a3.3065,3.3065,0,0,1,1.8443-.5606c1.7114,0,2.2133,1.2393,2.2133,2.6412ZM102.3934,34.75l1.2837,2.0068h-.5161L101.4345,34.75Zm2.3169,6.3154c-1.24-.03-3.1723.1475-3.1723,1.9034a1.2775,1.2775,0,0,0,1.2837,1.4609,1.85,1.85,0,0,0,1.8295-1.4019A1.5739,1.5739,0,0,0,104.71,42.6Z" style="fill:#808285"/><path 
d="M114.9364,34.4552v8.8086c0,.4721.03,1.166.0591,1.5937h-.605l-.0444-1.2246h-.044a2.3579,2.3579,0,0,1-2.2134,1.3721c-1.52,0-2.7294-1.3428-2.7294-3.5855,0-2.42,1.3129-3.792,2.8476-3.792a2.1763,2.1763,0,0,1,2.0215,1.1509h.0293V34.4552Zm-.6787,6.0791a2.8863,2.8863,0,0,0-.0591-.605,2.0347,2.0347,0,0,0-1.9477-1.7266c-1.4014,0-2.1983,1.4019-2.1983,3.1578,0,1.5937.6787,3.0693,2.1543,3.0693a2.0811,2.0811,0,0,0,1.9917-1.771,2.2467,2.2467,0,0,0,.0591-.5606Z" style="fill:#808285"/><path d="M117.1508,36.4029a.5516.5516,0,0,1-.5166-.59.565.565,0,0,1,.5459-.59.5928.5928,0,0,1-.0147,1.1806Zm-.3247,8.4546V37.775h.6787v7.0825Z" style="fill:#808285"/><path d="M119.1283,43.9425A2.6116,2.6116,0,0,0,120.56,44.4c1.0034,0,1.52-.5752,1.52-1.3276,0-.7378-.3838-1.166-1.3282-1.6084-1.0473-.4873-1.6967-1.0772-1.6967-1.9624a1.9052,1.9052,0,0,1,2.0508-1.8741,2.5882,2.5882,0,0,1,1.4462.4278l-.28.56a2.0951,2.0951,0,0,0-1.2544-.3979,1.176,1.176,0,0,0-1.2837,1.18c0,.6933.4131,1.0034,1.2983,1.4458,1.0034.4575,1.7266,1.0332,1.7266,2.11a2.0192,2.0192,0,0,1-2.2427,2.0361,3.11,3.11,0,0,1-1.6528-.4721Z" style="fill:#808285"/><path d="M125.0756,36.2257V37.775h1.8149v.5606h-1.8149v4.6625c0,.93.2949,1.4019.9589,1.4019a1.9224,1.9224,0,0,0,.6788-.0884l.0888.5313a2.2093,2.2093,0,0,1-.8706.1474,1.3616,1.3616,0,0,1-1.1362-.4721,2.5964,2.5964,0,0,1-.3984-1.6822v-4.5H123.29V37.775h1.107V36.506Z" style="fill:#808285"/><path d="M132.0258,43.2052a12.1552,12.1552,0,0,0,.0883,1.6523h-.62l-.0884-.9443h-.0444a2.2773,2.2773,0,0,1-1.9771,1.0918,1.849,1.849,0,0,1-1.9184-1.9478c0-1.6523,1.3872-2.582,3.8808-2.5674v-.2065c0-.8555-.1479-2.0806-1.6381-2.0654a2.6629,2.6629,0,0,0-1.5494.4721l-.1914-.5019a3.307,3.307,0,0,1,1.8443-.5606c1.7114,0,2.2134,1.2393,2.2134,2.6412Zm-.6788-2.14c-1.24-.03-3.1723.1475-3.1723,1.9034a1.2775,1.2775,0,0,0,1.2837,1.4609,1.8507,1.8507,0,0,0,1.83-1.4019,1.5774,1.5774,0,0,0,.059-.4277Z" style="fill:#808285"/><path d="M133.8558,39.4571c0-.7231-.0293-1.1362-.059-1.6821h.6347l.044,1.1655h.03a2.3424,2.3424,0,0,1,2.1689-1.313c.708,0,2.2134.3985,2.2134,2.7886v4.4414h-.6787V40.5343c0-1.1953-.3985-2.3164-1.7266-2.3164a1.9956,1.9956,0,0,0-1.874,1.5786,2.7894,2.7894,0,0,0-.0738.6343v4.4267h-.6787Z" style="fill:#808285"/><path d="M145.1151,44.5773a3.8973,3.8973,0,0,1-1.8.413c-1.8442,0-3.0835-1.4165-3.0835-3.6152,0-2.1543,1.3277-3.7476,3.3052-3.7476a3.114,3.114,0,0,1,1.5933.3833l-.2359.5757a2.8259,2.8259,0,0,0-1.4316-.354c-1.7114,0-2.5376,1.5049-2.5376,3.128,0,1.8593,1.0327,3.0395,2.5083,3.0395a3.4141,3.4141,0,0,0,1.5049-.354Z" style="fill:#808285"/><path d="M146.4442,41.1837c0,2.4492,1.1655,3.2163,2.4195,3.2163a3.3738,3.3738,0,0,0,1.7119-.3686l.1621.5312a3.975,3.975,0,0,1-1.9624.4277c-1.9033,0-3.01-1.4755-3.01-3.5708,0-2.3315,1.1948-3.792,2.8623-3.792,2.0508,0,2.4639,2.0362,2.4639,3.128a3.7546,3.7546,0,0,1-.0147.4282Zm3.94-.5464c.0293-1.21-.4868-2.4346-1.8443-2.4346-1.3427,0-1.9477,1.3282-2.0659,2.4346Z" style="fill:#808285"/><line x1="150.4791" y1="32.7638" x2="60.2701" y2="32.7638" style="fill:none;stroke:#bcbec0;stroke-miterlimit:10;stroke-width:0.25px"/></svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-header-01.svg b/fidle/img/00-Fidle-header-01.svg deleted file mode 100755 index f7467f4..0000000 --- a/fidle/img/00-Fidle-header-01.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 319.4819 38.2457"><path 
d="M19.6212,13.4825a5.49,5.49,0,0,0,2.2409-.7517,2.75,2.75,0,0,1,1.0037-.3925A6.2169,6.2169,0,0,0,20.4184,5.353a7.2454,7.2454,0,0,0-5.0435-.8518,10.436,10.436,0,0,0-4.3281,2.2353c-.4328.3626-5.581,5.2428-7.7283,4.27C1.8658,10.3486,4.46,7.9537,3.27,5.7652a.0949.0949,0,0,0-.1584-.0105c-.6056.817-1.1976,1.7975-2.0041,1.3573A3.7988,3.7988,0,0,1,.1729,5.89.0941.0941,0,0,0,0,5.9434a9.9185,9.9185,0,0,0,2.4932,6.0532,15.0278,15.0278,0,0,0,10.339,5.3173c2.27.2261,7.6543-.49,9.8054-4.36a5.4574,5.4574,0,0,0-.5189.2577,6.04,6.04,0,0,1-2.448.8142c-.0748.0069-.1491.01-.2234.01a4.3218,4.3218,0,0,1-2.44-.9782.4573.4573,0,1,1,.3495-.4436l-.0023.0218A3.5637,3.5637,0,0,0,19.6212,13.4825ZM12.76,15.5084a8.3323,8.3323,0,0,1-1.9609.3562c-.4428,0-.627-.1255-.7147-.314-.2306-.4961.6005-1.2133,1.3378-1.7279a.2726.2726,0,0,1,.312.4472,4.4932,4.4932,0,0,0-1.1262,1.0351,5.352,5.352,0,0,0,2.0105-.3235.2728.2728,0,0,1,.1415.5269ZM19.0763,8.863a1.0412,1.0412,0,0,1,1.0109,1.0032.68.68,0,1,0-.6023.9942.7023.7023,0,0,0,.1263-.0126.9691.9691,0,0,1-.5349.1646,1.0763,1.0763,0,0,1,0-2.1494ZM15.5649,1.8843a.5453.5453,0,0,0,.2143.7407c.2638.1453.82-.1708,1.1567.3.1751.2449-.3665-1.11-.63-1.2554A.5449.5449,0,0,0,15.5649,1.8843Zm2.7777.0584c-.68.3984-.8055,2.0455-.63,1.8007a3.1,3.1,0,0,1,1.1567-.8456.5453.5453,0,0,0-.5264-.9551ZM17.6534.1266c-.3475.402-.11,1.4443-.0473,1.2532a2.216,2.216,0,0,1,.5595-.7875.3573.3573,0,0,0-.0087-.505A.3538.3538,0,0,0,17.6534.1266Z" style="fill:#e1232b"/><path d="M1.2051,20.5944H4.62v.41H1.6973v2.7481H4.3838v.41H1.6973v3.3428H1.2051Z" style="fill:#808184"/><path d="M6.4355,20.5944v6.9111H5.9434V20.5944Z" style="fill:#808184"/><path d="M8.1069,20.6969a10.2978,10.2978,0,0,1,1.774-.1538,3.7464,3.7464,0,0,1,2.789.9433,3.2947,3.2947,0,0,1,.8614,2.3995,3.78,3.78,0,0,1-.9024,2.6147A3.9543,3.9543,0,0,1,9.645,27.5568a15.0553,15.0553,0,0,1-1.5381-.0616Zm.4922,6.3984a8.5161,8.5161,0,0,0,1.1177.0513,2.96,2.96,0,0,0,3.312-3.24c.0205-1.7535-.9536-2.9532-3.1787-2.9532a6.2767,6.2767,0,0,0-1.251.1128Z" style="fill:#808184"/><path d="M14.7422,20.5944h.4922v6.5009h3.0761v.41H14.7422Z" style="fill:#808184"/><path d="M22.5874,24.07H19.8291v3.0249h3.0864v.41H19.3369V20.5944h3.4146v.41H19.8291V23.66h2.7583Z" style="fill:#808184"/><path d="M39.1846,4.6617h5.874v1.26H40.6753V9.9679h4.064V11.21h-4.064v5.4126H39.1846Z"/><path d="M49.67,16.8c-2.0942,0-3.727-1.5971-3.727-4.4184,0-2.9639,1.7392-4.5254,3.851-4.5254,2.1827,0,3.7266,1.6679,3.7266,4.4189,0,3.23-2.0054,4.5249-3.833,4.5249Zm.0708-1.1357c1.4019,0,2.2539-1.5791,2.2539-3.354,0-1.4195-.5859-3.3184-2.2539-3.3184-1.65,0-2.2715,1.81-2.2715,3.354,0,1.7393.834,3.3184,2.2539,3.3184Z"/><path d="M55.3457,10.5714c0-.9228-.0356-1.7925-.0708-2.5376h1.313l.0708,1.5972h.0537a2.4268,2.4268,0,0,1,2.2-1.7749,2.4683,2.4683,0,0,1,.3906.0356V9.3468a1.6584,1.6584,0,0,0-.4438-.0356,2.094,2.094,0,0,0-1.97,1.9345,4.7283,4.7283,0,0,0-.0532.7451v4.6319H55.3457Z"/><path d="M60.7212,10.3229c0-.9229-.0357-1.58-.0708-2.2891h1.313l.0708,1.2954H62.07a2.7267,2.7267,0,0,1,2.4844-1.4731,2.3279,2.3279,0,0,1,2.2363,1.5971h.0352a3.3136,3.3136,0,0,1,.9228-1.0825,2.6718,2.6718,0,0,1,1.668-.5146c1.1538,0,2.6089.7807,2.6089,3.5136v5.253H70.5522V11.6183c0-1.5083-.479-2.52-1.6679-2.52a1.8265,1.8265,0,0,0-1.668,1.3667,2.6661,2.6661,0,0,0-.1064.7807v5.377H65.6367V11.3346c0-1.2422-.479-2.2363-1.5971-2.2363a1.9,1.9,0,0,0-1.7388,1.5083,2.6343,2.6343,0,0,0-.1065.7631v5.253H60.7212Z"/><path 
d="M79.9712,14.6a12.0266,12.0266,0,0,0,.1245,2.023H78.7471l-.1245-1.0293h-.0533A2.8,2.8,0,0,1,76.209,16.8a2.3557,2.3557,0,0,1-2.4312-2.4663c0-2.0942,1.7925-3.1767,4.7027-3.1592v-.2129c0-.8339-.2305-1.9873-1.7744-1.97a3.5381,3.5381,0,0,0-1.9873.586l-.3375-1.0293a4.9562,4.9562,0,0,1,2.5733-.6924c2.3423,0,3.0166,1.5971,3.0166,3.39Zm-1.4551-2.3779c-1.4018-.0181-3.2475.23-3.2475,1.9521a1.314,1.314,0,0,0,1.331,1.49A1.8578,1.8578,0,0,0,78.4453,14.28a1.6053,1.6053,0,0,0,.0708-.4971Z"/><path d="M84.0146,5.9928v2.041h2.0586V9.17H84.0146V13.943c0,1.0825.3375,1.5972,1.1,1.5972a3.1034,3.1034,0,0,0,.7989-.0889l.0532,1.1357a3.7008,3.7008,0,0,1-1.2778.1953,2.0422,2.0422,0,0,1-1.5791-.6211,3.2973,3.2973,0,0,1-.5679-2.2182V9.17H81.2993V8.0338H82.542V6.49Z"/><path d="M88.3965,6.6139a.8954.8954,0,0,1-.9048-.94.9142.9142,0,0,1,.9224-.9585.9042.9042,0,0,1,.9228.9585.89.89,0,0,1-.9228.94Zm-.71,10.0088V8.0338h1.4907v8.5889Z"/><path d="M94.7305,16.8c-2.0943,0-3.7271-1.5971-3.7271-4.4184,0-2.9639,1.7393-4.5254,3.8511-4.5254,2.1826,0,3.7266,1.6679,3.7266,4.4189,0,3.23-2.0054,4.5249-3.8331,4.5249Zm.0708-1.1357c1.4018,0,2.2539-1.5791,2.2539-3.354,0-1.4195-.586-3.3184-2.2539-3.3184-1.65,0-2.2715,1.81-2.2715,3.354,0,1.7393.834,3.3184,2.2539,3.3184Z"/><path d="M100.4238,10.3229c0-.9229-.0351-1.58-.0708-2.2891h1.313l.0889,1.2954h.0356a2.8837,2.8837,0,0,1,2.5733-1.4731c1.1709,0,2.7329.7632,2.7329,3.4428v5.3238H105.606V11.4943c0-1.2779-.4258-2.396-1.7393-2.396a1.9934,1.9934,0,0,0-1.8633,1.5439,2.9159,2.9159,0,0,0-.0888.7275v5.253h-1.4908Z"/><path d="M114.4922,4.6617v11.961h-1.4907V4.6617Z"/><path d="M117.0991,10.3229c0-.9229-.0351-1.58-.0708-2.2891h1.313l.0889,1.2954h.0356a2.8837,2.8837,0,0,1,2.5733-1.4731c1.1709,0,2.7329.7632,2.7329,3.4428v5.3238h-1.4907V11.4943c0-1.2779-.4263-2.396-1.7393-2.396a1.9933,1.9933,0,0,0-1.8633,1.5439,2.9156,2.9156,0,0,0-.0889.7275v5.253h-1.4907Z"/><path d="M127.8682,5.9928v2.041h2.0586V9.17h-2.0586V13.943c0,1.0825.3374,1.5972,1.1,1.5972a3.1031,3.1031,0,0,0,.7988-.0889l.0532,1.1357a3.7,3.7,0,0,1-1.2778.1953,2.0422,2.0422,0,0,1-1.5791-.6211,3.2978,3.2978,0,0,1-.5679-2.2182V9.17h-1.2427V8.0338h1.2427V6.49Z"/><path d="M131.5225,10.5714c0-.9228-.0357-1.7925-.0708-2.5376h1.3129l.0708,1.5972h.0538a2.4267,2.4267,0,0,1,2.2-1.7749,2.47,2.47,0,0,1,.3906.0356V9.3468a1.6588,1.6588,0,0,0-.4439-.0356,2.0938,2.0938,0,0,0-1.97,1.9345,4.7283,4.7283,0,0,0-.0532.7451v4.6319h-1.4907Z"/><path d="M139.8794,16.8c-2.0942,0-3.7271-1.5971-3.7271-4.4184,0-2.9639,1.7393-4.5254,3.8511-4.5254,2.1826,0,3.7266,1.6679,3.7266,4.4189,0,3.23-2.0054,4.5249-3.833,4.5249Zm.0708-1.1357c1.4019,0,2.2539-1.5791,2.2539-3.354,0-1.4195-.5859-3.3184-2.2539-3.3184-1.65,0-2.2715,1.81-2.2715,3.354,0,1.7393.834,3.3184,2.2539,3.3184Z"/><path d="M152.2988,4.1119V14.4576c0,.71.0352,1.5972.0708,2.1651h-1.313l-.0888-1.3662h-.0532A2.7961,2.7961,0,0,1,148.3413,16.8c-1.8989,0-3.3364-1.7036-3.3364-4.3828,0-2.9106,1.6152-4.561,3.4785-4.561a2.4864,2.4864,0,0,1,2.2891,1.207h.0356V4.1119Zm-1.4907,7.3467a3.9173,3.9173,0,0,0-.0532-.6387,2.0659,2.0659,0,0,0-1.97-1.7568c-1.4732,0-2.2715,1.4727-2.2715,3.3008,0,1.7568.7451,3.2119,2.2358,3.2119a2.0875,2.0875,0,0,0,1.9873-1.7568,2.27,2.27,0,0,0,.0713-.6392Z"/><path d="M161.24,14.28c0,.9048.0357,1.668.0713,2.3423h-1.3134l-.0889-1.2422h-.0352A2.9219,2.9219,0,0,1,157.3359,16.8c-1.4023,0-2.6977-.8691-2.6977-3.62V8.0338h1.4907v4.8975c0,1.5439.4258,2.6264,1.686,2.6264a1.9728,1.9728,0,0,0,1.81-1.3486,2.6966,2.6966,0,0,0,.124-.7983V8.0338H161.24Z"/><path 
d="M169.1689,16.3385a4.9267,4.9267,0,0,1-2.1469.4438c-2.36,0-3.94-1.686-3.94-4.3833a4.314,4.314,0,0,1,6.14-4.1347l-.3369,1.1533a3.4218,3.4218,0,0,0-1.5616-.3545c-1.7924,0-2.7153,1.5259-2.7153,3.2827,0,1.9878,1.1182,3.2119,2.6973,3.2119a3.8425,3.8425,0,0,0,1.6328-.3549Z"/><path d="M172.8579,5.9928v2.041h2.0586V9.17h-2.0586V13.943c0,1.0825.3374,1.5972,1.1,1.5972a3.1025,3.1025,0,0,0,.7988-.0889l.0533,1.1357a3.7011,3.7011,0,0,1-1.2779.1953,2.0424,2.0424,0,0,1-1.5791-.6211,3.2979,3.2979,0,0,1-.5678-2.2182V9.17h-1.2422V8.0338h1.2422V6.49Z"/><path d="M177.2393,6.6139a.8954.8954,0,0,1-.9048-.94.9142.9142,0,0,1,.9223-.9585.9043.9043,0,0,1,.9229.9585.89.89,0,0,1-.9229.94Zm-.71,10.0088V8.0338H178.02v8.5889Z"/><path d="M183.5732,16.8c-2.0942,0-3.727-1.5971-3.727-4.4184,0-2.9639,1.7392-4.5254,3.8511-4.5254,2.1826,0,3.7265,1.6679,3.7265,4.4189,0,3.23-2.0053,4.5249-3.833,4.5249Zm.0708-1.1357c1.4019,0,2.2539-1.5791,2.2539-3.354,0-1.4195-.5859-3.3184-2.2539-3.3184-1.65,0-2.2714,1.81-2.2714,3.354,0,1.7393.8339,3.3184,2.2539,3.3184Z"/><path d="M189.2666,10.3229c0-.9229-.0352-1.58-.0708-2.2891h1.313l.0889,1.2954h.0356a2.8836,2.8836,0,0,1,2.5732-1.4731c1.1709,0,2.733.7632,2.733,3.4428v5.3238h-1.4908V11.4943c0-1.2779-.4258-2.396-1.7392-2.396a1.9933,1.9933,0,0,0-1.8633,1.5439,2.9156,2.9156,0,0,0-.0889.7275v5.253h-1.4907Z"/><path d="M207.3638,14.6a12.0161,12.0161,0,0,0,.1245,2.023H206.14l-.1245-1.0293h-.0532a2.8,2.8,0,0,1-2.36,1.2065,2.3557,2.3557,0,0,1-2.4312-2.4663c0-2.0942,1.7925-3.1767,4.7026-3.1592v-.2129c0-.8339-.23-1.9873-1.7744-1.97a3.5386,3.5386,0,0,0-1.9873.586l-.3374-1.0293a4.9557,4.9557,0,0,1,2.5733-.6924c2.3423,0,3.0166,1.5971,3.0166,3.39Zm-1.4551-2.3779c-1.4019-.0181-3.2476.23-3.2476,1.9521a1.314,1.314,0,0,0,1.3311,1.49,1.8578,1.8578,0,0,0,1.8457-1.3838,1.6053,1.6053,0,0,0,.0708-.4971Z"/><path d="M216.27,14.28c0,.9048.0357,1.668.0713,2.3423h-1.3135l-.0888-1.2422h-.0352A2.922,2.922,0,0,1,212.3657,16.8c-1.4023,0-2.6977-.8691-2.6977-3.62V8.0338h1.4907v4.8975c0,1.5439.4258,2.6264,1.686,2.6264a1.9728,1.9728,0,0,0,1.81-1.3486,2.6966,2.6966,0,0,0,.124-.7983V8.0338H216.27Z"/><path d="M222.21,4.8214a15.1011,15.1011,0,0,1,2.8394-.248,6.0353,6.0353,0,0,1,4.2417,1.3129A5.6765,5.6765,0,0,1,230.96,10.34a6.5215,6.5215,0,0,1-1.6328,4.774,6.4413,6.4413,0,0,1-4.7027,1.5971,22.7951,22.7951,0,0,1-2.4136-.1245Zm1.4908,10.6123a8.9087,8.9087,0,0,0,1.2422.0532c2.7685,0,4.4545-1.6147,4.4545-5.0932.0176-2.8926-1.3842-4.6314-4.2416-4.6314a7.3789,7.3789,0,0,0-1.4551.124Z"/><path d="M233.76,12.5587c.0175,2.2715,1.2773,3.0523,2.6616,3.0523a4.78,4.78,0,0,0,2.0942-.4082l.2486,1.0825a6.0768,6.0768,0,0,1-2.5557.497c-2.4668,0-3.9043-1.7211-3.9043-4.33,0-2.6441,1.4375-4.5962,3.709-4.5962,2.4844,0,3.1943,2.2715,3.1943,3.9575a3.8783,3.8783,0,0,1-.0532.7451Zm4.01-1.0825c.0181-1.2066-.4614-2.52-1.8989-2.52-1.3838,0-1.9873,1.4018-2.0938,2.52Z"/><path d="M241.92,12.5587c.0176,2.2715,1.2774,3.0523,2.6616,3.0523a4.7812,4.7812,0,0,0,2.0943-.4082l.248,1.0825a6.0716,6.0716,0,0,1-2.5552.497c-2.4668,0-3.9043-1.7211-3.9043-4.33,0-2.6441,1.4375-4.5962,3.709-4.5962,2.4844,0,3.1944,2.2715,3.1944,3.9575a3.8685,3.8685,0,0,1-.0533.7451Zm4.01-1.0825c.018-1.2066-.4615-2.52-1.899-2.52-1.3838,0-1.9873,1.4018-2.0937,2.52Z"/><path 
d="M249.1753,10.8019c0-1.1709-.0357-2.023-.0708-2.7681h1.3306l.0888,1.3486h.0357a2.9638,2.9638,0,0,1,2.68-1.5263c1.9165,0,3.3007,1.7036,3.3007,4.3657,0,3.123-1.7216,4.5781-3.5317,4.5781a2.5777,2.5777,0,0,1-2.3066-1.2422h-.0357v4.543h-1.4907Zm1.4907,2.4311a2.6626,2.6626,0,0,0,.0708.6568,2.068,2.068,0,0,0,2.0054,1.7036c1.5083,0,2.2891-1.42,2.2891-3.3008,0-1.7393-.7627-3.2119-2.2535-3.2119a2.2068,2.2068,0,0,0-2.0229,1.81,2.5255,2.5255,0,0,0-.0889.6387Z"/><path d="M261.9126,4.6617h1.4907V15.3629h4.5786v1.26h-6.0693Z"/><path d="M270.1807,12.5587c.0175,2.2715,1.2773,3.0523,2.6616,3.0523a4.78,4.78,0,0,0,2.0942-.4082l.2486,1.0825a6.0768,6.0768,0,0,1-2.5557.497c-2.4668,0-3.9043-1.7211-3.9043-4.33,0-2.6441,1.4375-4.5962,3.709-4.5962,2.4844,0,3.1943,2.2715,3.1943,3.9575a3.8783,3.8783,0,0,1-.0532.7451Zm4.01-1.0825c.0181-1.2066-.4614-2.52-1.8989-2.52-1.3838,0-1.9873,1.4018-2.0938,2.52Z"/><path d="M283.0254,14.6a12.0266,12.0266,0,0,0,.1245,2.023h-1.3486l-.1245-1.0293h-.0533a2.8,2.8,0,0,1-2.36,1.2065,2.3557,2.3557,0,0,1-2.4312-2.4663c0-2.0942,1.7925-3.1767,4.7027-3.1592v-.2129c0-.8339-.2305-1.9873-1.7744-1.97a3.5387,3.5387,0,0,0-1.9874.586l-.3374-1.0293a4.9562,4.9562,0,0,1,2.5733-.6924c2.3423,0,3.0166,1.5971,3.0166,3.39ZM281.57,12.2218c-1.4018-.0181-3.2475.23-3.2475,1.9521a1.314,1.314,0,0,0,1.331,1.49A1.8578,1.8578,0,0,0,281.5,14.28a1.6053,1.6053,0,0,0,.0708-.4971Z"/><path d="M285.3652,10.5714c0-.9228-.0356-1.7925-.0708-2.5376h1.313l.0708,1.5972h.0537a2.4268,2.4268,0,0,1,2.2-1.7749,2.4708,2.4708,0,0,1,.3907.0356V9.3468a1.66,1.66,0,0,0-.4439-.0356,2.0938,2.0938,0,0,0-1.97,1.9345,4.7163,4.7163,0,0,0-.0532.7451v4.6319h-1.4908Z"/><path d="M290.7583,10.3229c0-.9229-.0352-1.58-.0708-2.2891h1.313l.0889,1.2954h.0356a2.8836,2.8836,0,0,1,2.5732-1.4731c1.1709,0,2.733.7632,2.733,3.4428v5.3238H295.94V11.4943c0-1.2779-.4262-2.396-1.7392-2.396a1.9933,1.9933,0,0,0-1.8633,1.5439,2.9156,2.9156,0,0,0-.0889.7275v5.253h-1.4907Z"/><path d="M300.5156,6.6139a.8954.8954,0,0,1-.9048-.94.9143.9143,0,0,1,.9224-.9585.9043.9043,0,0,1,.9229.9585.89.89,0,0,1-.9229.94Zm-.71,10.0088V8.0338h1.4907v8.5889Z"/><path d="M303.6909,10.3229c0-.9229-.0351-1.58-.0708-2.2891h1.313l.0889,1.2954h.0356a2.8837,2.8837,0,0,1,2.5733-1.4731c1.1709,0,2.7329.7632,2.7329,3.4428v5.3238H308.873V11.4943c0-1.2779-.4262-2.396-1.7392-2.396a1.9933,1.9933,0,0,0-1.8633,1.5439,2.9156,2.9156,0,0,0-.0889.7275v5.253h-1.4907Z"/><path d="M319.4111,15.4161c0,2.0762-.373,3.1587-1.1006,3.8687a4.1522,4.1522,0,0,1-2.8745.9936,5.2068,5.2068,0,0,1-2.5732-.6211l.3726-1.1538a4.4241,4.4241,0,0,0,2.2358.586c1.4551,0,2.4668-.7808,2.4668-2.875V15.274h-.0357a2.6328,2.6328,0,0,1-2.3955,1.3311c-1.9521,0-3.3183-1.7925-3.3183-4.2236,0-2.9815,1.7568-4.5254,3.5136-4.5254a2.5526,2.5526,0,0,1,2.36,1.3667h.0357l.0712-1.189h1.313c-.0356.603-.0708,1.3306-.0708,2.4487ZM317.92,11.2989a2.6236,2.6236,0,0,0-.0713-.6743,2.0022,2.0022,0,0,0-1.9165-1.5615c-1.331,0-2.2358,1.2773-2.2358,3.2471,0,1.8457.8164,3.1235,2.2358,3.1235a1.9568,1.9568,0,0,0,1.8814-1.4907,2.5966,2.5966,0,0,0,.1064-.8164Z"/><line x1="30.9665" y1="4.4557" x2="30.9665" y2="27.3725" style="fill:#59595b"/><path d="M39.6577,21.8619H39.98V24.005h.0141a1.1244,1.1244,0,0,1,1.0787-.6372c.7285,0,1.2744.6514,1.2744,1.7226,0,1.2188-.6934,1.7789-1.3374,1.7789a1.1071,1.1071,0,0,1-1.0508-.6373h-.021l-.021.5674H39.63c.0141-.2031.0278-.5322.0278-.7495Zm.3223,3.6768a.8339.8339,0,0,0,.0283.2309.9879.9879,0,0,0,.9595.8267c.6933,0,1.05-.6582,1.05-1.499,0-.7564-.35-1.4566-1.0367-1.4566a1.0622,1.0622,0,0,0-.9663.8614,1.136,1.136,0,0,0-.0351.28Z"/><path 
d="M43.0176,23.4376l.77,2.15c.0913.2588.168.5249.2241.7422h.0142c.063-.2031.1469-.4761.2309-.7564l.7217-2.1362h.3428l-.8472,2.3252a5.9374,5.9374,0,0,1-1.0576,2.1714,1.97,1.97,0,0,1-.5606.3921l-.1259-.273a1.7423,1.7423,0,0,0,.56-.4135,2.8542,2.8542,0,0,0,.49-.7915.4725.4725,0,0,0,.0346-.1607.4391.4391,0,0,0-.0278-.14l-1.1138-3.11Z"/><path d="M50.1523,26.6525a2.4922,2.4922,0,0,1-1.0922.21c-1.0157,0-1.9961-.6865-1.9961-2.3882a2.1653,2.1653,0,0,1,2.1289-2.4585,2.0109,2.0109,0,0,1,.9316.1753l-.105.28a1.8936,1.8936,0,0,0-.8335-.1753c-1.0434,0-1.7861.7354-1.7861,2.1641,0,1.394.6934,2.1152,1.751,2.1152a1.9628,1.9628,0,0,0,.9033-.189Z"/><path d="M50.8789,26.7994V22.0787h.3149L52.77,24.7613c.336.5953.6021,1.0928.8194,1.5831l.0136-.0074c-.0488-.7143-.0556-1.2324-.0556-1.9887v-2.27h.3149v4.7207h-.3081L51.9854,24.11a14.7383,14.7383,0,0,1-.8194-1.583l-.021.0073c.042.623.0488,1.1343.0488,1.9961v2.269Z"/><path d="M54.9248,22.1417a4.1572,4.1572,0,0,1,.9033-.0976,1.5718,1.5718,0,0,1,1.17.3711,1.22,1.22,0,0,1,.3223.8613,1.2446,1.2446,0,0,1-.8755,1.2329v.0137c.3711.1123.5952.4692.7071,1.0434a6.0236,6.0236,0,0,0,.3222,1.2329h-.3359a6.9147,6.9147,0,0,1-.2871-1.1626c-.1333-.6792-.4136-.98-1.0088-1.0014h-.5952v2.164h-.3223Zm.3223,2.2344h.602a1.0324,1.0324,0,0,0,1.1421-1.0435c0-.7006-.4482-1.0224-1.17-1.0224a2.5158,2.5158,0,0,0-.5742.0556Z"/><path d="M58.0327,26.3092a1.7089,1.7089,0,0,0,.9107.2729.9544.9544,0,0,0,1.0576-.9731c0-.5186-.2735-.8335-.8614-1.1069-.5883-.2451-1.1137-.6372-1.1137-1.3028a1.1968,1.1968,0,0,1,1.2886-1.1836,1.55,1.55,0,0,1,.84.21l-.126.2729a1.3664,1.3664,0,0,0-.7422-.2031.846.846,0,0,0-.9385.84c0,.54.3008.7915.9034,1.0718.7.3433,1.0717.7144,1.0717,1.3726a1.27,1.27,0,0,1-1.4008,1.2817,1.87,1.87,0,0,1-1.0088-.2871Z"/><path d="M63.8921,24.7755v.2871H62.2393v-.2871Z"/><path d="M66.0337,22.1417a5.1231,5.1231,0,0,1,1.0225-.0976,2.2048,2.2048,0,0,1,1.625.5459,2.2931,2.2931,0,0,1,.6093,1.7231,2.6818,2.6818,0,0,1-.5884,1.8491,2.3062,2.3062,0,0,1-1.7861.6724,7.2309,7.2309,0,0,1-.8823-.042Zm.3223,4.3848a3.9324,3.9324,0,0,0,.6025.0278c1.2676,0,1.9956-.7212,1.9956-2.22a1.7593,1.7593,0,0,0-1.9116-2.0171,3.4024,3.4024,0,0,0-.6865.063Z"/><path d="M70.1982,25.0553c0,1.1626.5533,1.5268,1.149,1.5268a1.601,1.601,0,0,0,.812-.1748l.0771.252a1.8848,1.8848,0,0,1-.9311.2031c-.9038,0-1.4292-.7-1.4292-1.6948,0-1.1064.5674-1.8,1.3589-1.8.9736,0,1.17.9663,1.17,1.4844a1.6985,1.6985,0,0,1-.0073.2031Zm1.87-.2588c.0141-.5747-.231-1.1558-.8755-1.1558-.6372,0-.9243.63-.98,1.1558Z"/><path d="M73.1455,23.4376l.7144,2.1011c.1049.2871.1963.5742.2729.8545h.021c.07-.2661.1753-.56.28-.8545l.7143-2.1011h.3365l-1.2188,3.3618h-.28l-1.1767-3.3618Z"/><path d="M76.1064,22.0787h.3223v4.4478H78.32v.2729H76.1064Z"/><path d="M80.2021,26.8693c-.9946,0-1.8-.8331-1.8-2.3951,0-1.604.8335-2.4653,1.87-2.4653,1.0156,0,1.7929.8472,1.7929,2.395,0,1.6953-.8891,2.4654-1.8559,2.4654Zm.0279-.28c1.0088,0,1.499-1.0786,1.499-2.1714,0-.9663-.4341-2.1289-1.4917-2.1289s-1.499,1.1274-1.499,2.1782c0,1.0151.49,2.1221,1.4848,2.1221Z"/><path d="M85.8657,26.6241a3.2578,3.2578,0,0,1-1.19.2242,1.9836,1.9836,0,0,1-1.478-.5669,2.5607,2.5607,0,0,1-.6162-1.8,2.1933,2.1933,0,0,1,2.22-2.4512,2.3054,2.3054,0,0,1,.9521.1821l-.105.273A1.9442,1.9442,0,0,0,84.78,22.31c-1.086,0-1.8633.7422-1.8633,2.122,0,1.4288.7353,2.1363,1.8071,2.1363a1.9171,1.9171,0,0,0,.8194-.1333v-1.709h-.9873V24.46h1.31Z"/><path d="M87.6489,27.08l1.9893-5.0708H89.96L87.9639,27.08Z"/><path 
d="M91.8906,22.1417a4.1572,4.1572,0,0,1,.9033-.0976,1.5722,1.5722,0,0,1,1.17.3711,1.22,1.22,0,0,1,.3222.8613,1.2446,1.2446,0,0,1-.8755,1.2329v.0137c.3711.1123.5953.4692.7071,1.0434a6.0307,6.0307,0,0,0,.3222,1.2329H94.104a6.9147,6.9147,0,0,1-.2871-1.1626c-.1333-.6792-.4136-.98-1.0088-1.0014h-.5952v2.164h-.3223Zm.3223,2.2344h.602a1.0324,1.0324,0,0,0,1.1421-1.0435c0-.7006-.4482-1.0224-1.17-1.0224a2.5158,2.5158,0,0,0-.5742.0556Z"/><path d="M97.1558,24.4532H95.4609v2.066h1.9048v.28h-2.227V22.0787h2.122v.28h-1.8v1.814h1.6949Z"/><path d="M97.9736,26.3092a1.7089,1.7089,0,0,0,.9107.2729.9544.9544,0,0,0,1.0576-.9731c0-.5186-.2734-.8335-.8613-1.1069-.5884-.2451-1.1138-.6372-1.1138-1.3028a1.1968,1.1968,0,0,1,1.2886-1.1836,1.55,1.55,0,0,1,.84.21l-.126.2729a1.3664,1.3664,0,0,0-.7422-.2031.8459.8459,0,0,0-.9384.84c0,.54.3012.7915.9033,1.0718.7.3433,1.0718.7144,1.0718,1.3726a1.27,1.27,0,0,1-1.4009,1.2817,1.87,1.87,0,0,1-1.0088-.2871Z"/><path d="M101.4248,22.0787v4.7207h-.3223V22.0787Z"/><path d="M102.4819,26.7994V22.0787h.315l1.5761,2.6826c.336.5953.6021,1.0928.8194,1.5831l.0141-.0074c-.0493-.7143-.0561-1.2324-.0561-1.9887v-2.27h.3149v4.7207h-.3081L103.5884,24.11a14.7565,14.7565,0,0,1-.8194-1.583l-.021.0073c.042.623.0489,1.1343.0489,1.9961v2.269Z"/><path d="M106.5273,22.0787h2.1221v.28h-1.8v1.8843h1.66v.28h-1.66v2.2763h-.3223Z"/><path d="M110.9805,26.8693c-.9947,0-1.8-.8331-1.8-2.3951,0-1.604.8335-2.4653,1.87-2.4653,1.0156,0,1.793.8472,1.793,2.395,0,1.6953-.8892,2.4654-1.856,2.4654Zm.0278-.28c1.0088,0,1.499-1.0786,1.499-2.1714,0-.9663-.4341-2.1289-1.4917-2.1289s-1.499,1.1274-1.499,2.1782c0,1.0151.49,2.1221,1.4849,2.1221Z"/><path d="M114.5,27.08l1.9893-5.0708h.3217L114.8149,27.08Z"/><path d="M118.6021,26.3092a1.7082,1.7082,0,0,0,.9106.2729.9544.9544,0,0,0,1.0576-.9731c0-.5186-.2734-.8335-.8613-1.1069-.5884-.2451-1.1138-.6372-1.1138-1.3028a1.1968,1.1968,0,0,1,1.2886-1.1836,1.55,1.55,0,0,1,.84.21l-.126.2729a1.366,1.366,0,0,0-.7421-.2031.846.846,0,0,0-.9385.84c0,.54.3008.7915.9033,1.0718.7.3433,1.0718.7144,1.0718,1.3726a1.27,1.27,0,0,1-1.4009,1.2817,1.87,1.87,0,0,1-1.0088-.2871Z"/><path d="M122.2568,25.1534l-.5185,1.646h-.3291l1.5268-4.7207h.3155l1.5268,4.7207h-.3364l-.5254-1.646Zm1.5689-.2729-.4971-1.5059a8.9574,8.9574,0,0,1-.2314-.9033h-.021c-.063.3081-.14.5811-.231.8965l-.4975,1.5127Z"/><path d="M125.4834,22.1417a4.1572,4.1572,0,0,1,.9033-.0976,1.5718,1.5718,0,0,1,1.17.3711,1.22,1.22,0,0,1,.3223.8613,1.2446,1.2446,0,0,1-.8755,1.2329v.0137c.3711.1123.5952.4692.707,1.0434a6.0279,6.0279,0,0,0,.3223,1.2329h-.3359a6.9147,6.9147,0,0,1-.2871-1.1626c-.1333-.6792-.4136-.98-1.0088-1.0014h-.5952v2.164h-.3223Zm.3223,2.2344h.602a1.0324,1.0324,0,0,0,1.1421-1.0435c0-.7006-.4482-1.0224-1.17-1.0224a2.5158,2.5158,0,0,0-.5742.0556Z"/><path d="M129.0532,22.0787v4.7207h-.3222V22.0787Z"/><path d="M130.9849,27.08l1.9892-5.0708h.3218L131.3,27.08Z"/><path d="M135.5493,22.0787v4.7207h-.3222V22.0787Z"/><path d="M136.6128,22.1417a5.1231,5.1231,0,0,1,1.0225-.0976,2.2048,2.2048,0,0,1,1.625.5459,2.2931,2.2931,0,0,1,.6093,1.7231,2.6823,2.6823,0,0,1-.5883,1.8491,2.3066,2.3066,0,0,1-1.7862.6724,7.2309,7.2309,0,0,1-.8823-.042Zm.3223,4.3848a3.9324,3.9324,0,0,0,.6025.0278c1.2676,0,1.9956-.7212,1.9956-2.22a1.7593,1.7593,0,0,0-1.9116-2.0171,3.4024,3.4024,0,0,0-.6865.063Z"/><path 
d="M140.6587,22.1417a4.1572,4.1572,0,0,1,.9033-.0976,1.571,1.571,0,0,1,1.17.3711,1.22,1.22,0,0,1,.3223.8613,1.2446,1.2446,0,0,1-.8755,1.2329v.0137c.3711.1123.5952.4692.707,1.0434a6.0279,6.0279,0,0,0,.3223,1.2329h-.3359a6.8628,6.8628,0,0,1-.2871-1.1626c-.1333-.6792-.4136-.98-1.0088-1.0014h-.5952v2.164h-.3223Zm.3223,2.2344h.6025a1.0323,1.0323,0,0,0,1.1416-1.0435c0-.7006-.4482-1.0224-1.17-1.0224a2.5158,2.5158,0,0,0-.5742.0556Z"/><path d="M144.229,22.0787v4.7207h-.3223V22.0787Z"/><path d="M145.1528,26.3092a1.7089,1.7089,0,0,0,.9107.2729.9544.9544,0,0,0,1.0576-.9731c0-.5186-.2734-.8335-.8613-1.1069-.5884-.2451-1.1138-.6372-1.1138-1.3028a1.1968,1.1968,0,0,1,1.2886-1.1836,1.55,1.55,0,0,1,.84.21l-.126.2729a1.3664,1.3664,0,0,0-.7422-.2031.8459.8459,0,0,0-.9384.84c0,.54.3007.7915.9033,1.0718.7.3433,1.0718.7144,1.0718,1.3726a1.27,1.27,0,0,1-1.4009,1.2817,1.87,1.87,0,0,1-1.0088-.2871Z"/><line x1="0.9591" y1="36.1069" x2="242.1513" y2="36.1069" style="fill:none;stroke:#e6e7e7;stroke-miterlimit:10;stroke-width:0.25px"/><line x1="279.1389" y1="36.1069" x2="318.76" y2="36.1069" style="fill:none;stroke:#e6e7e7;stroke-miterlimit:10;stroke-width:0.25px"/><circle cx="316.0167" cy="24.4233" r="2.4525" style="fill:#fff"/><path d="M316.0588,21.7158a2.68,2.68,0,0,1,2.7012,2.6964,2.5676,2.5676,0,0,1-.7706,1.8971,2.6614,2.6614,0,0,1-1.9306.7993,2.7128,2.7128,0,0,1-2.6916-2.6963,2.6375,2.6375,0,0,1,.7945-1.9115A2.5844,2.5844,0,0,1,316.0588,21.7158Zm.01.4864a2.0913,2.0913,0,0,0-1.5552.65,2.1532,2.1532,0,0,0-.66,1.56,2.236,2.236,0,0,0,2.2147,2.2053,2.1652,2.1652,0,0,0,1.57-.66,2.0638,2.0638,0,0,0,.6356-1.5457,2.2112,2.2112,0,0,0-2.2052-2.21Zm-1.2038,1.83a1.2035,1.2035,0,0,1,.4-.768,1.1766,1.1766,0,0,1,.7848-.272,1.3061,1.3061,0,0,1,1.0112.4092,1.4923,1.4923,0,0,1,.3755,1.05,1.4426,1.4426,0,0,1-.39,1.0327,1.3308,1.3308,0,0,1-1.0113.4117,1.2,1.2,0,0,1-.79-.2745,1.1664,1.1664,0,0,1-.4-.78h.6791q.024.4913.5923.4912a.535.535,0,0,0,.4574-.2456,1.3686,1.3686,0,0,0,.0146-1.3072.6073.6073,0,0,0-1.0642.2527h.1974l-.5344.5345-.5344-.5345.2116,0Z"/><path d="M299.4332,24.4109a2.3488,2.3488,0,0,1-.3325,1.1994,2.3808,2.3808,0,0,1-.8936.8844,2.5227,2.5227,0,0,1-1.2365.3252,2.486,2.486,0,0,1-2.14-1.21,2.4133,2.4133,0,0,1,0-2.4091,2.4865,2.4865,0,0,1,2.14-1.21,2.5217,2.5217,0,0,1,1.2365.3253,2.4316,2.4316,0,0,1,1.2261,2.094Z" style="fill:#fff;fill-rule:evenodd"/><path 
d="M296.9394,21.7338a2.7132,2.7132,0,0,1,1.9535.7827,2.5825,2.5825,0,0,1,.5923.864,2.88,2.88,0,0,1,.1974,1.0368,2.519,2.519,0,0,1-.7793,1.87,2.7958,2.7958,0,0,1-1.9639.8031,2.78,2.78,0,0,1-1.039-.2033,2.8325,2.8325,0,0,1-.8937-.59,2.69,2.69,0,0,1-.5922-.864,2.6172,2.6172,0,0,1-.2078-1.0165,2.6295,2.6295,0,0,1,.81-1.9008,2.625,2.625,0,0,1,1.9223-.7827Zm.01.4879a2.1488,2.1488,0,0,0-1.5794.64,2.2237,2.2237,0,0,0-.4987.7115,2.1455,2.1455,0,0,0-.1663.8437,2.0485,2.0485,0,0,0,.1663.8234,2.1178,2.1178,0,0,0,.4987.7115,2.2461,2.2461,0,0,0,.7274.4778,2.19,2.19,0,0,0,.852.1626,2.2271,2.2271,0,0,0,.8521-.1626,2.5219,2.5219,0,0,0,.7481-.4778,2.054,2.054,0,0,0,.6338-1.5349,2.0762,2.0762,0,0,0-.1662-.8437,2.1586,2.1586,0,0,0-.478-.7115,2.2061,2.2061,0,0,0-1.59-.64Zm-.0312,1.7484-.374.1829a.3141.3141,0,0,0-.1351-.1626.3877.3877,0,0,0-.1663-.0508c-.2389,0-.3636.1524-.3636.4777a.5391.5391,0,0,0,.0935.3355.3067.3067,0,0,0,.27.1321.3459.3459,0,0,0,.3429-.2236l.3325.1626a.7185.7185,0,0,1-.3013.305.7291.7291,0,0,1-.4156.1118.8106.8106,0,0,1-.5923-.2135.8529.8529,0,0,1-.2286-.61.8359.8359,0,0,1,.2286-.61.7842.7842,0,0,1,.5819-.2236.7632.7632,0,0,1,.7273.3863Zm1.5794,0-.3636.1829a.3648.3648,0,0,0-.3118-.2134c-.2389,0-.3636.1524-.3636.4777a.5391.5391,0,0,0,.0935.3355.3067.3067,0,0,0,.27.1321.355.355,0,0,0,.3429-.2236l.3429.1626a.8077.8077,0,0,1-.3117.305.7291.7291,0,0,1-.4156.1118.7487.7487,0,0,1-.81-.8234.7959.7959,0,0,1,.2286-.61.8627.8627,0,0,1,1.2988.1627Z" style="fill-rule:evenodd"/><path d="M305.749,24.4515a2.24,2.24,0,0,1-.3221,1.1791,2.4008,2.4008,0,0,1-.8832.8742,2.5447,2.5447,0,0,1-1.2157.3151,2.4749,2.4749,0,0,1-1.2053-.3151,2.4293,2.4293,0,0,1-.8936-.8742,2.3192,2.3192,0,0,1,0-2.3582,2.4308,2.4308,0,0,1,.8936-.8742,2.4749,2.4749,0,0,1,1.2053-.3151,2.5447,2.5447,0,0,1,1.2157.3151,2.4023,2.4023,0,0,1,.8832.8742,2.2408,2.2408,0,0,1,.3221,1.1791Z" style="fill:#fff;fill-rule:evenodd"/><path d="M303.3176,21.7338a2.7048,2.7048,0,0,1,1.9431.7725,2.5792,2.5792,0,0,1,.8,1.911,2.45,2.45,0,0,1-.79,1.87,2.7008,2.7008,0,0,1-1.9535.8031,2.668,2.668,0,0,1-1.9327-.7929,2.5157,2.5157,0,0,1-.81-1.88,2.5982,2.5982,0,0,1,.81-1.911,2.6981,2.6981,0,0,1,1.9327-.7725Zm0,.4879a2.1655,2.1655,0,0,0-1.5794.64,2.13,2.13,0,0,0-.665,1.5552,2.07,2.07,0,0,0,.665,1.5349,2.1682,2.1682,0,0,0,1.5794.6506,2.2422,2.2422,0,0,0,1.6-.6608,1.98,1.98,0,0,0,.6442-1.5349,2.088,2.088,0,0,0-.6546-1.545,2.1891,2.1891,0,0,0-1.59-.64Zm.7378,1.5247v1.0978h-.3118v1.3011h-.852V24.8442H302.58V23.7464a.1665.1665,0,0,1,.0519-.1219.1742.1742,0,0,1,.1247-.0509h1.1222a.174.174,0,0,1,.1247.0509.1662.1662,0,0,1,.052.1219Zm-1.1222-.6912a.3845.3845,0,1,1,.3844.3761.3375.3375,0,0,1-.3844-.3761Z" style="fill-rule:evenodd"/><path d="M312.122,24.4007a2.4128,2.4128,0,0,1-.3325,1.21,2.4383,2.4383,0,0,1-.9143.8844,2.4959,2.4959,0,0,1-3.377-.8844,2.4128,2.4128,0,0,1-.3325-1.21,2.3578,2.3578,0,0,1,.3325-1.21,2.5034,2.5034,0,0,1,3.377-.8844,2.4375,2.4375,0,0,1,.9143.8844,2.3578,2.3578,0,0,1,.3325,1.21Z" style="fill:#fff;fill-rule:evenodd"/><path 
d="M311.6337,22.5063a2.817,2.817,0,0,0-3.8758,0,2.5983,2.5983,0,0,0-.81,1.911,2.548,2.548,0,0,0,.81,1.88,2.6679,2.6679,0,0,0,1.9327.7929,2.774,2.774,0,0,0,1.9639-.7929,2.5323,2.5323,0,0,0,.7793-1.88,2.5792,2.5792,0,0,0-.8-1.911Zm-.3429,3.4357a2.2248,2.2248,0,0,1-1.6.6608,2.1919,2.1919,0,0,1-1.5794-.6506,2.0756,2.0756,0,0,1-.665-1.5451,2.3219,2.3219,0,0,1,.1143-.7115l.7274.3151h-.052v.3253h.26c0,.0407-.01.0813-.01.1321v.0712h-.2494v.3253h.3013a1.25,1.25,0,0,0,.26.5794,1.3587,1.3587,0,0,0,1.1118.5082,1.6125,1.6125,0,0,0,.717-.1626l-.1039-.4981a1.523,1.523,0,0,1-.53.1118A.8246.8246,0,0,1,309.4,25.18a.8144.8144,0,0,1-.1455-.3151h.9975l1.4131.61a1.7768,1.7768,0,0,1-.374.4675Zm-1.7768-1.4027h0Zm.852-.2033h.0416v-.3253h-.7793l-.3118-.1321a.3816.3816,0,0,1,.0936-.1525.7007.7007,0,0,1,.5611-.244,1.5316,1.5316,0,0,1,.5091.1017l.1351-.5082a1.8313,1.8313,0,0,0-.6962-.1322,1.4183,1.4183,0,0,0-1.06.4574c-.0519.061-.1039.1423-.1558.2135l-.8936-.3863a2.0314,2.0314,0,0,1,.3013-.3659,2.1747,2.1747,0,0,1,1.5794-.6506,2.1982,2.1982,0,0,1,1.59.6506,2.0628,2.0628,0,0,1,.6546,1.5552,2.5876,2.5876,0,0,1-.0623.5692l-1.5067-.6505Z" style="fill-rule:evenodd"/><path d="M244.4736,33.7364h.2305v1.5015h.01a.8454.8454,0,0,1,.2905-.3052.7945.7945,0,0,1,.4355-.12c.2251,0,.7408.125.7408.9307v1.5215H245.95V35.7882c0-.4053-.145-.7754-.5752-.7754a.6892.6892,0,0,0-.646.5254.72.72,0,0,0-.0249.22v1.5064h-.2305Z" style="fill:#d9d9d9"/><path d="M247.2041,34.337v.5254h.6157v.19h-.6157v1.5811c0,.3154.1.4756.3252.4756a.6535.6535,0,0,0,.2305-.03l.03.18a.7551.7551,0,0,1-.2954.05.46.46,0,0,1-.3852-.16.8775.8775,0,0,1-.1353-.57V35.0528h-.3755v-.19h.3755v-.4306Z" style="fill:#d9d9d9"/><path d="M248.6143,34.337v.5254h.6157v.19h-.6157v1.5811c0,.3154.1.4756.3252.4756a.6533.6533,0,0,0,.23-.03l.03.18a.7551.7551,0,0,1-.2954.05.4605.4605,0,0,1-.3853-.16.8776.8776,0,0,1-.1352-.57V35.0528h-.3755v-.19h.3755v-.4306Z" style="fill:#d9d9d9"/><path d="M249.709,35.6232c0-.3355-.01-.5606-.02-.7608h.2207l.0146.4155h.01a.8269.8269,0,0,1,.7759-.4653c.5351,0,.9209.4653.9209,1.2207,0,.876-.4659,1.2813-.9815,1.2813a.7728.7728,0,0,1-.7-.4h-.01v1.3315h-.23Zm.23.7158a.78.78,0,0,0,.02.19.7016.7016,0,0,0,.6806.5908c.5,0,.7559-.4653.7559-1.0761,0-.5357-.25-1.0362-.7407-1.0362a.7542.7542,0,0,0-.6909.6206,1.4075,1.4075,0,0,0-.0249.2Z" style="fill:#d9d9d9"/><path d="M252.0986,36.9542a.8847.8847,0,0,0,.4854.1553c.34,0,.5156-.1953.5156-.45,0-.2505-.13-.3955-.4507-.5459-.355-.165-.5757-.3652-.5757-.6655a.6463.6463,0,0,1,.6958-.6353.8777.8777,0,0,1,.4908.145l-.0952.19a.7092.7092,0,0,0-.4253-.1348.399.399,0,0,0-.4356.4c0,.2349.14.34.44.49.34.1553.5855.3506.5855.7159a.6848.6848,0,0,1-.7608.69,1.054,1.054,0,0,1-.5605-.16Z" style="fill:#d9d9d9"/><path d="M253.9839,35.4278a.1994.1994,0,0,1-.19-.2148.2023.2023,0,0,1,.1953-.2207.2.2,0,0,1,.1953.2207.1974.1974,0,0,1-.1953.2148Zm0,1.8868a.2.2,0,0,1-.19-.2149.2022.2022,0,0,1,.1953-.22.2188.2188,0,0,1,0,.4351Z" style="fill:#d9d9d9"/><path d="M254.3936,37.465l1.4213-3.6236h.23l-1.4262,3.6236Z" style="fill:#d9d9d9"/><path d="M256.0435,37.465l1.4213-3.6236h.23l-1.4267,3.6236Z" style="fill:#d9d9d9"/><path d="M258.0938,37.2648v-2.212h-.33v-.19h.33v-.16c0-.5855.2651-1.0161.871-1.0161a.8779.8779,0,0,1,.5054.1552l-.1.18a.8118.8118,0,0,0-.4453-.1353c-.4756,0-.6006.3707-.6006.836v.14h1.251v2.4024h-.2305v-2.212h-1.02v2.212Z" style="fill:#d9d9d9"/><path 
d="M262.0352,33.7364v2.9878c0,.16.01.3955.02.5406H261.85l-.0151-.4156H261.82a.8.8,0,0,1-.751.4654c-.5156,0-.9257-.4551-.9257-1.2159,0-.8208.4453-1.2861.9658-1.2861a.7372.7372,0,0,1,.6855.39h.01V33.7364Zm-.23,2.0621a1.0167,1.0167,0,0,0-.02-.2056.69.69,0,0,0-.6607-.5855c-.4756,0-.7456.4756-.7456,1.0713,0,.5406.23,1.041.7305,1.041a.7064.7064,0,0,0,.6758-.6005.7868.7868,0,0,0,.02-.1905Z" style="fill:#d9d9d9"/><path d="M262.7734,33.7364h.2305v3.5284h-.2305Z" style="fill:#d9d9d9"/><path d="M263.8232,36.0187c0,.8305.3956,1.0908.8208,1.0908a1.1453,1.1453,0,0,0,.5806-.125l.0552.18a1.3479,1.3479,0,0,1-.6655.145c-.6455,0-1.021-.5005-1.021-1.211,0-.791.4052-1.2861.9707-1.2861.6958,0,.8359.69.8359,1.0606a1.2966,1.2966,0,0,1-.0049.1455Zm1.3365-.1856c.01-.41-.1651-.8257-.6255-.8257-.4556,0-.6607.4507-.7007.8257Z" style="fill:#d9d9d9"/><path d="M265.9937,37.3146a.2.2,0,0,1-.19-.2149.2022.2022,0,0,1,.1953-.22.2188.2188,0,0,1,0,.4351Z" style="fill:#d9d9d9"/><path d="M268.2549,37.17a1.3213,1.3213,0,0,1-.6109.14c-.6254,0-1.0459-.48-1.0459-1.2261a1.154,1.154,0,0,1,1.1211-1.271,1.0549,1.0549,0,0,1,.5406.13l-.08.1953a.9565.9565,0,0,0-.4854-.12c-.581,0-.8613.51-.8613,1.061,0,.63.3506,1.0308.8511,1.0308a1.156,1.156,0,0,0,.51-.12Z" style="fill:#d9d9d9"/><path d="M268.7686,35.4327c0-.2451-.01-.3852-.0206-.57h.2154l.0151.3955h.01a.7946.7946,0,0,1,.7358-.4453c.24,0,.751.1348.751.9458v1.5064h-.2305V35.7985c0-.4058-.1352-.7857-.5854-.7857a.6765.6765,0,0,0-.6358.5352.9646.9646,0,0,0-.0249.2153v1.5015h-.23Z" style="fill:#d9d9d9"/><path d="M271.2031,35.5333c0-.2207-.01-.4609-.0195-.6709h.2148l.01.4556h.01a.7.7,0,0,1,.6357-.5054c.03,0,.0552.0049.08.0049v.23a.4988.4988,0,0,0-.09-.0049c-.3251,0-.5454.2905-.5952.6509a1.54,1.54,0,0,0-.0151.22v1.3516h-.2305Z" style="fill:#d9d9d9"/><path d="M272.4683,36.9542a.884.884,0,0,0,.4853.1553c.34,0,.5156-.1953.5156-.45,0-.2505-.13-.3955-.4506-.5459-.3555-.165-.5752-.3652-.5752-.6655a.6458.6458,0,0,1,.6953-.6353.8776.8776,0,0,1,.4907.145l-.0952.19a.7089.7089,0,0,0-.4253-.1348.3987.3987,0,0,0-.4356.4c0,.2349.14.34.4405.49.34.1553.5854.3506.5854.7159a.6849.6849,0,0,1-.7607.69,1.0555,1.0555,0,0,1-.5606-.16Z" style="fill:#d9d9d9"/><path d="M274.2935,37.3146a.2.2,0,0,1-.19-.2149.2022.2022,0,0,1,.1953-.22.2188.2188,0,0,1,0,.4351Z" style="fill:#d9d9d9"/><path d="M275.103,37.2648v-2.212h-.3305v-.19h.3305v-.145a1.195,1.195,0,0,1,.24-.8257.6984.6984,0,0,1,.5-.2055.6609.6609,0,0,1,.3251.0751l-.07.1851a.5924.5924,0,0,0-.2754-.06c-.4106,0-.4907.3956-.4907.8257v.15h.5859v.19h-.5859v2.212Z" style="fill:#d9d9d9"/><path d="M276.2783,35.5333c0-.2207-.01-.4609-.0205-.6709h.2153l.01.4556h.01a.7.7,0,0,1,.6357-.5054c.03,0,.0552.0049.08.0049v.23a.4959.4959,0,0,0-.09-.0049c-.3256,0-.5459.2905-.5957.6509a1.54,1.54,0,0,0-.0151.22v1.3516h-.23Z" style="fill:#d9d9d9"/></svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-logo-00.svg b/fidle/img/00-Fidle-logo-00.svg deleted file mode 100755 index 21a3ea7..0000000 --- a/fidle/img/00-Fidle-logo-00.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 56.6098 67.4017"><title>00-Fidle-logo-00</title><g id="Calque_2" data-name="Calque 2"><g id="Calque_4" data-name="Calque 4"><path 
d="M47.9919,32.977a13.424,13.424,0,0,0,5.4811-1.8386,6.7273,6.7273,0,0,1,2.455-.96,15.2069,15.2069,0,0,0-5.9862-17.0857A17.7216,17.7216,0,0,0,37.6058,11.01c-4.3345.7164-8.8269,3.996-10.5862,5.4673C25.961,17.3639,13.369,29.3005,8.1168,26.9216c-3.5532-1.61,2.7909-7.4675-.1189-12.82a.2323.2323,0,0,0-.3874-.0258c-1.4813,1.9984-2.9293,4.3968-4.9019,3.32-.8812-.4812-1.6744-2.0178-2.2858-2.99A.23.23,0,0,0,0,14.5371,24.26,24.26,0,0,0,6.0983,29.3426c4.5289,5.4189,12.465,11.7291,25.2885,13.0059,5.5522.5529,18.7217-1.1976,23.9833-10.6647a13.2741,13.2741,0,0,0-1.2693.63,14.7716,14.7716,0,0,1-5.9875,1.9915c-.1831.0169-.3649.0245-.5466.0245a10.5714,10.5714,0,0,1-5.9687-2.3927,1.1184,1.1184,0,1,1,.8549-1.0851c0,.0183-.0044.0353-.0057.0535C43.53,31.7328,45.7847,33.1928,47.9919,32.977ZM31.2094,37.9323a20.3764,20.3764,0,0,1-4.7961.8712c-1.0832.0006-1.5335-.307-1.748-.768-.5643-1.2134,1.4687-2.9677,3.272-4.2263A.6668.6668,0,1,1,28.7,34.903a10.991,10.991,0,0,0-2.7544,2.5318c.3523.0761,1.4964.1245,4.9176-.7913a.6672.6672,0,0,1,.3459,1.2888Zm15.45-16.2541a2.5468,2.5468,0,0,1,2.4726,2.4538,1.6639,1.6639,0,1,0-1.4731,2.4317,1.7278,1.7278,0,0,0,.3088-.0308,2.37,2.37,0,0,1-1.3083.4025,2.6324,2.6324,0,0,1,0-5.2572ZM38.0706,4.6089a1.3336,1.3336,0,0,0,.524,1.8116c.6453.3553,2.0046-.4177,2.8292.7346.4284.5988-.8963-2.7147-1.5417-3.0708A1.3328,1.3328,0,0,0,38.0706,4.6089Zm6.7939.1428c-1.6619.9743-1.97,5.0031-1.5417,4.4043A7.584,7.584,0,0,1,46.152,7.0878a1.3337,1.3337,0,0,0-1.2875-2.3361ZM43.1787.31c-.85.9831-.2679,3.5325-.1157,3.0651a5.4212,5.4212,0,0,1,1.3687-1.926A.8741.8741,0,0,0,44.41.2135.8656.8656,0,0,0,43.1787.31Z" style="fill:#e12229"/><path d="M2.9731,50.3685h8.3535v1.0034H4.1518v6.7232h6.5728v1.0034H4.1518v8.1777H2.9731Z" style="fill:#808285"/><path d="M15.7407,50.3685V67.2762H14.562V50.3685Z" style="fill:#808285"/><path d="M19.8549,50.5946a27.8016,27.8016,0,0,1,4.3145-.3516c3.0855,0,5.4185.8282,6.8232,2.3081A7.9974,7.9974,0,0,1,33.1,58.3959a9.3118,9.3118,0,0,1-2.2076,6.4218c-1.5053,1.6309-4.039,2.584-7.3,2.584a36.0492,36.0492,0,0,1-3.7378-.15Zm1.1788,15.6782a21.3406,21.3406,0,0,0,2.7343.1255c5.4185,0,8.1026-3.0352,8.1026-7.9272.0254-4.29-2.3579-7.2247-7.7764-7.2247a17.9379,17.9379,0,0,0-3.0605.251Z" style="fill:#808285"/><path d="M36.0844,50.3685h1.1788V66.2728h7.5507v1.0034H36.0844Z" style="fill:#808285"/><path d="M55.2744,58.8724H48.5009v7.4h7.5762v1.0034H47.3222V50.3685h8.3535v1.0034H48.5009V57.869h6.7735Z" style="fill:#808285"/></g></g></svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-logo-01.svg b/fidle/img/00-Fidle-logo-01.svg deleted file mode 100755 index 286bff6..0000000 --- a/fidle/img/00-Fidle-logo-01.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 140.2164 40.848"><title>00-Fidle-logo-01</title><g id="Calque_2" data-name="Calque 2"><g id="Calque_4" data-name="Calque 4"><path 
d="M46.1913,31.74a12.9222,12.9222,0,0,0,5.2755-1.77,6.4763,6.4763,0,0,1,2.3629-.9239,14.6364,14.6364,0,0,0-5.7616-16.4446,17.0565,17.0565,0,0,0-11.8732-2.0051c-4.1719.69-8.4957,3.8461-10.189,5.2622-1.0189.8536-13.1385,12.3424-18.1936,10.0527-3.42-1.5492,2.6862-7.1873-.1144-12.3393a.2236.2236,0,0,0-.373-.0248c-1.4257,1.9233-2.8193,4.2317-4.7179,3.1953-.8482-.4632-1.6116-1.9422-2.2-2.8775A.2216.2216,0,0,0,0,13.9917,23.35,23.35,0,0,0,5.87,28.2417a35.3776,35.3776,0,0,0,24.34,12.518c5.3439.5321,18.0193-1.1527,23.0835-10.2646a12.7681,12.7681,0,0,0-1.2217.6066,14.2177,14.2177,0,0,1-5.7629,1.9167c-.1761.0163-.3511.0236-.5261.0236a10.1733,10.1733,0,0,1-5.7446-2.303,1.0764,1.0764,0,1,1,.8227-1.0443c0,.0176-.0042.0339-.0054.0515C41.8966,30.5423,44.0669,31.9474,46.1913,31.74ZM30.0385,36.5091a19.6093,19.6093,0,0,1-4.6162.8385c-1.0425.0006-1.476-.2954-1.6824-.7392-.5431-1.1678,1.4136-2.8563,3.1493-4.0677a.6418.6418,0,1,1,.7343,1.0528,10.5781,10.5781,0,0,0-2.651,2.4368c.339.0732,1.44.12,4.733-.7616a.6422.6422,0,0,1,.333,1.24Zm14.87-15.6442a2.4512,2.4512,0,0,1,2.38,2.3617,1.6015,1.6015,0,1,0-1.4179,2.34,1.6573,1.6573,0,0,0,.2973-.03,2.28,2.28,0,0,1-1.2593.3875,2.5337,2.5337,0,0,1,0-5.06ZM36.6423,4.436A1.2835,1.2835,0,0,0,37.1466,6.18c.6211.342,1.9294-.402,2.7231.7071.4122.5763-.8627-2.6129-1.4839-2.9556A1.2827,1.2827,0,0,0,36.6423,4.436Zm6.5389.1374c-1.5995.9378-1.8961,4.8154-1.4838,4.2391a7.2989,7.2989,0,0,1,2.7231-1.9906,1.2837,1.2837,0,0,0-1.2393-2.2485ZM41.5587.2981c-.8179.9462-.2579,3.4-.1114,2.95a5.2169,5.2169,0,0,1,1.3174-1.8537A.8415.8415,0,0,0,42.7441.2054.8332.8332,0,0,0,41.5587.2981Z" style="fill:#e12229"/><path d="M65.6671,13.7493H77.3946V15.158H67.3223v9.4379h9.2271v1.4087H67.3223v11.481H65.6671Z" style="fill:#808285"/><path d="M83.5909,13.7493V37.4856H81.9356V13.7493Z" style="fill:#808285"/><path d="M89.3658,14.0662a39.0353,39.0353,0,0,1,6.0576-.4932c4.3316,0,7.607,1.1621,9.5791,3.24a11.2256,11.2256,0,0,1,2.958,8.2056,13.0738,13.0738,0,0,1-3.0991,9.0156c-2.1128,2.2891-5.67,3.6275-10.248,3.6275a50.7148,50.7148,0,0,1-5.2476-.2115Zm1.6553,22.0107a29.8576,29.8576,0,0,0,3.8388.1763c7.607,0,11.375-4.2617,11.375-11.1289.0352-6.022-3.31-10.1426-10.9174-10.1426a25.2377,25.2377,0,0,0-4.2964.352Z" style="fill:#808285"/><path d="M112.15,13.7493h1.6553V36.0769h10.6006v1.4087H112.15Z" style="fill:#808285"/><path d="M139.0894,25.6877h-9.5088V36.0769h10.6358v1.4087h-12.291V13.7493h11.7275V15.158H129.5806v9.1211h9.5088Z" style="fill:#808285"/></g></g></svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-titre-01.svg b/fidle/img/00-Fidle-titre-01.svg deleted file mode 100755 index 461754b..0000000 --- a/fidle/img/00-Fidle-titre-01.svg +++ /dev/null @@ -1 +0,0 @@ -<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 343.3548 80.1648"><title>00-Fidle-titre-01</title><g id="Calque_2" data-name="Calque 2"><g id="Calque_4" data-name="Calque 4"><path d="M80.4129,3.657h4.3535v.8755h-3.31V7.219h3.0581v.8633H81.4564v3.6582H80.4129Z"/><path d="M91.1019,8.79a2.8367,2.8367,0,0,1-2.89,3.082A2.76,2.76,0,0,1,85.429,8.886a2.8343,2.8343,0,0,1,2.8784-3.082A2.7484,2.7484,0,0,1,91.1019,8.79Zm-4.6055.06c0,1.2714.7319,2.2309,1.7632,2.2309,1.0073,0,1.7632-.9473,1.7632-2.2549,0-.9834-.4917-2.23-1.7393-2.23S86.4964,7.7468,86.4964,8.85Z"/><path d="M92.4368,7.7468c0-.6836-.0122-1.2715-.0478-1.811h.9233l.0362,1.1392h.0478a1.7414,1.7414,0,0,1,1.6069-1.271,1.15,1.15,0,0,1,.3.0356v.9956a1.5954,1.5954,0,0,0-.36-.0361,1.4811,1.4811,0,0,0-1.4156,1.3554,2.98,2.98,0,0,0-.0478.4917v3.0943H92.4368Z"/><path 
d="M96.4085,7.5066c0-.6-.0122-1.0913-.0478-1.5708h.9233l.0479.9355h.0361A2.0009,2.0009,0,0,1,99.1912,5.804a1.7161,1.7161,0,0,1,1.6431,1.163h.0239a2.3871,2.3871,0,0,1,.6475-.7553,1.9781,1.9781,0,0,1,1.2832-.4077c.7676,0,1.9072.5034,1.9072,2.5185v3.418h-1.0317V8.4543c0-1.1152-.4077-1.7871-1.2593-1.7871a1.3662,1.3662,0,0,0-1.2471.9595,1.7454,1.7454,0,0,0-.084.5278v3.586h-1.0312V8.2625c0-.9234-.4082-1.5953-1.2114-1.5953a1.4429,1.4429,0,0,0-1.3077,1.0557,1.4635,1.4635,0,0,0-.0839.5156v3.502H96.4085Z"/><path d="M109.6312,11.7405l-.084-.7315h-.0361a2.162,2.162,0,0,1-1.7749.8633,1.6538,1.6538,0,0,1-1.775-1.667c0-1.4033,1.2471-2.1709,3.49-2.1587v-.12a1.1966,1.1966,0,0,0-1.3193-1.3432,2.8923,2.8923,0,0,0-1.5108.4316l-.24-.6953a3.6031,3.6031,0,0,1,1.9072-.5156c1.7749,0,2.2066,1.2109,2.2066,2.3745v2.1709a8.15,8.15,0,0,0,.0961,1.3911Zm-.1558-2.9624c-1.1514-.0239-2.459.18-2.459,1.3071a.9345.9345,0,0,0,.9956,1.0078,1.445,1.445,0,0,0,1.4034-.9717,1.1138,1.1138,0,0,0,.06-.3359Z"/><path d="M113.4241,4.2688v1.667h1.5113V6.739h-1.5113v3.13c0,.72.2037,1.1274.7915,1.1274a2.3641,2.3641,0,0,0,.6114-.0717l.0483.7915a2.5858,2.5858,0,0,1-.9355.144,1.4648,1.4648,0,0,1-1.14-.4438,2.1515,2.1515,0,0,1-.4077-1.5113V6.739h-.8994V5.9358h.8994V4.5447Z"/><path d="M117.3,4.3044a.6536.6536,0,0,1-1.3071,0,.6433.6433,0,0,1,.66-.66A.6266.6266,0,0,1,117.3,4.3044Zm-1.1753,7.4361V5.9358h1.0557v5.8047Z"/><path d="M124.1854,8.79a2.8367,2.8367,0,0,1-2.89,3.082,2.76,2.76,0,0,1-2.7827-2.9863,2.8343,2.8343,0,0,1,2.8784-3.082A2.7484,2.7484,0,0,1,124.1854,8.79Zm-4.6055.06c0,1.2714.7319,2.2309,1.7632,2.2309,1.0073,0,1.7632-.9473,1.7632-2.2549,0-.9834-.4917-2.23-1.7393-2.23S119.58,7.7468,119.58,8.85Z"/><path d="M125.52,7.5066c0-.6-.0117-1.0913-.0478-1.5708h.9355l.06.9595h.0244a2.1341,2.1341,0,0,1,1.919-1.0913c.8032,0,2.0507.4795,2.0507,2.47v3.4663h-1.0556V8.3943c0-.9356-.3477-1.7149-1.3433-1.7149a1.4994,1.4994,0,0,0-1.415,1.0791,1.5207,1.5207,0,0,0-.0718.4917v3.49H125.52Z"/><path d="M83.1717,16.3289V35.4373H80.7054V16.3289Z"/><path d="M87.6785,25.4294c0-1.4174-.0288-2.58-.1137-3.7138h2.2114l.1421,2.268h.0566a5.0457,5.0457,0,0,1,4.5362-2.58c1.8994,0,4.8476,1.1342,4.8476,5.84v8.1934H96.8641v-7.91c0-2.2114-.8223-4.0542-3.1753-4.0542a3.5446,3.5446,0,0,0-3.3457,2.5513,3.5922,3.5922,0,0,0-.17,1.1626v8.25H87.6785Z"/><path d="M106.3026,17.7747v3.9409h3.5722V23.615h-3.5722v7.3994c0,1.7012.4819,2.665,1.8711,2.665a5.55,5.55,0,0,0,1.4458-.17l.1137,1.8711a6.11,6.11,0,0,1-2.2114.34,3.4564,3.4564,0,0,1-2.6934-1.0494,5.082,5.082,0,0,1-.9638-3.5722V23.615h-2.1265V21.7156h2.1265V18.427Z"/><path d="M112.68,25.9963c0-1.6157-.0283-3.0048-.1138-4.2807h2.1831l.085,2.6933h.1138a4.1165,4.1165,0,0,1,3.7988-3.0053,2.6989,2.6989,0,0,1,.7085.0849v2.3531a3.7451,3.7451,0,0,0-.85-.085A3.5,3.5,0,0,0,115.26,26.96a7.0263,7.0263,0,0,0-.1133,1.1626v7.3145H112.68Z"/><path d="M134.0838,28.4631c0,5.0747-3.5156,7.2862-6.8325,7.2862-3.7139,0-6.5776-2.7217-6.5776-7.06,0-4.5928,3.0053-7.2861,6.8042-7.2861C131.4188,21.4036,134.0838,24.2668,134.0838,28.4631Zm-10.8867.1416c0,3.0054,1.7295,5.2735,4.1675,5.2735,2.3813,0,4.1674-2.24,4.1674-5.33,0-2.3247-1.1621-5.2734-4.1108-5.2734S123.1971,25.9963,123.1971,28.6047Z"/><path 
d="M149.0789,15.3084V31.8933c0,1.2193.0283,2.6084.1133,3.544h-2.24l-.1133-2.3814h-.0567a5.0765,5.0765,0,0,1-4.6782,2.6934c-3.3169,0-5.8686-2.8067-5.8686-6.9746-.0284-4.5645,2.8071-7.3711,6.1523-7.3711a4.5871,4.5871,0,0,1,4.1392,2.0981h.0566V15.3084Zm-2.4951,11.9921a4.4023,4.4023,0,0,0-.1133-1.0488A3.6693,3.6693,0,0,0,142.87,23.36c-2.58,0-4.1108,2.2681-4.1108,5.3018,0,2.7783,1.3608,5.0747,4.0542,5.0747a3.75,3.75,0,0,0,3.6572-2.9766,4.3384,4.3384,0,0,0,.1133-1.0776Z"/><path d="M164.6981,31.6951c0,1.4175.0283,2.665.1132,3.7422H162.6l-.1416-2.24h-.0566a5.1641,5.1641,0,0,1-4.5362,2.5518c-2.1547,0-4.7348-1.1909-4.7348-6.0108V21.7156h2.4951v7.5981c0,2.6079.7939,4.3657,3.062,4.3657a3.6066,3.6066,0,0,0,3.2886-2.268,3.6533,3.6533,0,0,0,.2265-1.2759v-8.42h2.4952Z"/><path d="M178.6155,34.927a9.1854,9.1854,0,0,1-3.9409.794c-4.1391,0-6.8325-2.8072-6.8325-7.003a6.9724,6.9724,0,0,1,7.3716-7.2861,8.3342,8.3342,0,0,1,3.4585.709l-.5669,1.9277a5.783,5.783,0,0,0-2.8916-.6523c-3.147,0-4.8482,2.3252-4.8482,5.1884,0,3.1753,2.0411,5.1314,4.7627,5.1314a7.0916,7.0916,0,0,0,3.0621-.68Z"/><path d="M184.9075,17.7747v3.9409H188.48V23.615h-3.5723v7.3994c0,1.7012.482,2.665,1.8711,2.665a5.5565,5.5565,0,0,0,1.4463-.17l.1133,1.8711a6.1106,6.1106,0,0,1-2.2114.34,3.4562,3.4562,0,0,1-2.6934-1.0494,5.0815,5.0815,0,0,1-.9639-3.5722V23.615h-2.1264V21.7156H182.47V18.427Z"/><path d="M194.0633,17.86a1.5452,1.5452,0,0,1-3.09,0,1.52,1.52,0,0,1,1.559-1.5591A1.481,1.481,0,0,1,194.0633,17.86ZM191.285,35.4373V21.7156H193.78V35.4373Z"/><path d="M210.3353,28.4631c0,5.0747-3.5157,7.2862-6.8325,7.2862-3.7139,0-6.5777-2.7217-6.5777-7.06,0-4.5928,3.0054-7.2861,6.8042-7.2861C207.67,21.4036,210.3353,24.2668,210.3353,28.4631Zm-10.8867.1416c0,3.0054,1.7294,5.2735,4.1674,5.2735,2.3814,0,4.1675-2.24,4.1675-5.33,0-2.3247-1.1621-5.2734-4.1108-5.2734S199.4486,25.9963,199.4486,28.6047Z"/><path d="M213.48,25.4294c0-1.4174-.0288-2.58-.1138-3.7138h2.2114l.1421,2.268h.0567a5.0455,5.0455,0,0,1,4.5361-2.58c1.8994,0,4.8477,1.1342,4.8477,5.84v8.1934h-2.4947v-7.91c0-2.2114-.8222-4.0542-3.1753-4.0542a3.5446,3.5446,0,0,0-3.3457,2.5513,3.5949,3.5949,0,0,0-.17,1.1626v8.25H213.48Z"/><path d="M236.3245,35.3064l-.084-.7314h-.0361a2.1621,2.1621,0,0,1-1.7749.8632,1.6537,1.6537,0,0,1-1.7749-1.667c0-1.4033,1.2471-2.1709,3.49-2.1586v-.12a1.1967,1.1967,0,0,0-1.3194-1.3432,2.8922,2.8922,0,0,0-1.5107.4316l-.24-.6953a3.6032,3.6032,0,0,1,1.9073-.5156c1.7749,0,2.2065,1.2109,2.2065,2.3745v2.1709a8.1393,8.1393,0,0,0,.0962,1.3911Zm-.1557-2.9624c-1.1514-.0239-2.459.18-2.459,1.3071a.9344.9344,0,0,0,.9956,1.0078,1.4447,1.4447,0,0,0,1.4033-.9716,1.112,1.112,0,0,0,.06-.336Z"/><path d="M243.7513,33.7234c0,.6.0117,1.1274.0478,1.583h-.9355l-.06-.9473H242.78a2.1844,2.1844,0,0,1-1.9189,1.0791c-.9112,0-2.003-.5034-2.003-2.5424V29.5017h1.0557v3.2144c0,1.103.3359,1.8466,1.2954,1.8466a1.4839,1.4839,0,0,0,1.4868-1.499v-3.562h1.0557Z"/><path d="M80.9622,41.8367a54.6064,54.6064,0,0,1,8.1128-.6143c5.482,0,9.3848,1.272,11.9722,3.6841,2.6313,2.4116,4.166,5.8325,4.166,10.6123,0,4.8242-1.4912,8.771-4.2539,11.49-2.7627,2.7627-7.3232,4.254-13.0684,4.254a61.5851,61.5851,0,0,1-6.9287-.3511Zm3.815,26.1367a24.1078,24.1078,0,0,0,3.8593.2192c8.1568,0,12.586-4.56,12.586-12.542.0439-6.9731-3.9029-11.4023-11.9722-11.4023a21.6824,21.6824,0,0,0-4.4731.395Z" style="fill:#e12229"/><path 
d="M112.3631,61.0881c.0879,5.2188,3.4209,7.3677,7.28,7.3677a13.9592,13.9592,0,0,0,5.8765-1.0962l.6577,2.7627a17.1944,17.1944,0,0,1-7.0606,1.3154c-6.5342,0-10.437-4.2978-10.437-10.7s3.7715-11.4458,9.9546-11.4458c6.9292,0,8.771,6.0957,8.771,9.9985a14.5269,14.5269,0,0,1-.1318,1.7978Zm11.3145-2.7627c.0439-2.4555-1.0088-6.271-5.35-6.271-3.9034,0-5.6133,3.5962-5.9205,6.271Z" style="fill:#e12229"/><path d="M134.3353,61.0881c.0879,5.2188,3.42,7.3677,7.28,7.3677a13.9583,13.9583,0,0,0,5.8764-1.0962l.6577,2.7627a17.1938,17.1938,0,0,1-7.0605,1.3154c-6.5342,0-10.437-4.2978-10.437-10.7s3.7715-11.4458,9.9546-11.4458c6.9292,0,8.771,6.0957,8.771,9.9985a14.5029,14.5029,0,0,1-.1319,1.7978ZM145.65,58.3254c.044-2.4555-1.0088-6.271-5.35-6.271-3.9033,0-5.6133,3.5962-5.92,6.271Z" style="fill:#e12229"/><path d="M154.1585,56.7029c0-2.7188-.0874-4.9116-.1753-6.9287h3.4644l.1753,3.64h.0879a8.3443,8.3443,0,0,1,7.5429-4.122c5.1309,0,8.99,4.3413,8.99,10.788,0,7.63-4.6485,11.4019-9.6475,11.4019a7.443,7.443,0,0,1-6.5342-3.333h-.0879V79.6824h-3.8154Zm3.8154,5.6572a8.72,8.72,0,0,0,.1753,1.5786,5.9547,5.9547,0,0,0,5.7891,4.5171c4.0781,0,6.4463-3.333,6.4463-8.2007,0-4.2539-2.2363-7.8935-6.315-7.8935a6.1579,6.1579,0,0,0-5.8325,4.78,6.25,6.25,0,0,0-.2632,1.5786Z" style="fill:#e12229"/><path d="M188.5418,41.4417h3.815V67.7981h12.63v3.2012H188.5418Z" style="fill:#e12229"/><path d="M210.6453,61.0881c.0879,5.2188,3.4209,7.3677,7.28,7.3677a13.9592,13.9592,0,0,0,5.8765-1.0962l.6577,2.7627a17.1944,17.1944,0,0,1-7.0606,1.3154c-6.5341,0-10.437-4.2978-10.437-10.7s3.7715-11.4458,9.9546-11.4458c6.9292,0,8.771,6.0957,8.771,9.9985a14.5269,14.5269,0,0,1-.1318,1.7978ZM221.96,58.3254c.0439-2.4555-1.0088-6.271-5.35-6.271-3.9033,0-5.6133,3.5962-5.92,6.271Z" style="fill:#e12229"/><path d="M242.2215,70.9993l-.3071-2.6753h-.1314a7.9054,7.9054,0,0,1-6.49,3.1577c-4.2979,0-6.4907-3.0259-6.4907-6.0957,0-5.1309,4.561-7.9375,12.7617-7.8936V57.054c0-1.7544-.4824-4.9117-4.8237-4.9117a10.575,10.575,0,0,0-5.5259,1.5787l-.877-2.5435a13.1724,13.1724,0,0,1,6.9727-1.8857c6.49,0,8.0693,4.4292,8.0693,8.6831v7.9375a29.8785,29.8785,0,0,0,.3506,5.0869Zm-.57-10.8321c-4.21-.0874-8.99.6582-8.99,4.78a3.415,3.415,0,0,0,3.64,3.6836,5.28,5.28,0,0,0,5.1309-3.5522,4.0186,4.0186,0,0,0,.2192-1.2276Z" style="fill:#e12229"/><path d="M251.6077,56.3958c0-2.5-.0434-4.6485-.1753-6.6216h3.377l.1313,4.166h.1753a6.3682,6.3682,0,0,1,5.8765-4.6484,4.1855,4.1855,0,0,1,1.0962.1313v3.64a5.7817,5.7817,0,0,0-1.3154-.1318c-2.7188,0-4.6485,2.061-5.1748,4.9556a10.8951,10.8951,0,0,0-.1753,1.7978V70.9993h-3.8155Z" style="fill:#e12229"/><path d="M266.1243,55.5188c0-2.1924-.0434-3.9907-.1753-5.7446H269.37l.2192,3.5083h.0879a7.8048,7.8048,0,0,1,7.0166-3.9907c2.9385,0,7.499,1.7539,7.499,9.0336V70.9993h-3.8589V58.7639c0-3.42-1.272-6.271-4.9116-6.271a5.4823,5.4823,0,0,0-5.1748,3.9468,5.5386,5.5386,0,0,0-.2632,1.7983V70.9993h-3.8594Z" style="fill:#e12229"/><path d="M294.763,43.81a2.39,2.39,0,0,1-4.78,0,2.3514,2.3514,0,0,1,2.4121-2.4116A2.29,2.29,0,0,1,294.763,43.81Zm-4.2978,27.19V49.7742h3.8593V70.9993Z" style="fill:#e12229"/><path d="M300.7274,55.5188c0-2.1924-.044-3.9907-.1753-5.7446h3.42l.22,3.5083h.0874a7.8048,7.8048,0,0,1,7.0166-3.9907c2.9385,0,7.499,1.7539,7.499,9.0336V70.9993h-3.8589V58.7639c0-3.42-1.2719-6.271-4.9116-6.271A5.4823,5.4823,0,0,0,304.85,56.44a5.5386,5.5386,0,0,0-.2632,1.7983V70.9993h-3.8593Z" style="fill:#e12229"/><path 
d="M343.3548,49.7742c-.0874,1.5346-.1753,3.2451-.1753,5.8325V67.9294c0,4.8677-.9648,7.85-3.0259,9.6919-2.061,1.9292-5.0434,2.5435-7.7182,2.5435a13.7015,13.7015,0,0,1-7.0606-1.7544l.9649-2.938a11.9683,11.9683,0,0,0,6.227,1.6665c3.9468,0,6.8413-2.061,6.8413-7.4116V67.36H339.32a7.5769,7.5769,0,0,1-6.7534,3.5518c-5.2622,0-9.0337-4.4727-9.0337-10.3491,0-7.1924,4.6924-11.27,9.56-11.27a7.06,7.06,0,0,1,6.6221,3.6835h.0874l.1757-3.2011Zm-3.9907,8.376a5.501,5.501,0,0,0-.2193-1.7544,5.5591,5.5591,0,0,0-5.394-4.0782c-3.6836,0-6.3149,3.1138-6.3149,8.0254,0,4.166,2.1049,7.63,6.271,7.63a5.6425,5.6425,0,0,0,5.35-3.9468,6.7119,6.7119,0,0,0,.3067-2.061Z" style="fill:#e12229"/><path d="M47.9919,36.84a13.4225,13.4225,0,0,0,5.4811-1.8386,6.7273,6.7273,0,0,1,2.455-.96,15.2069,15.2069,0,0,0-5.9862-17.0857,17.7222,17.7222,0,0,0-12.336-2.0832c-4.3345.7164-8.8269,3.996-10.5862,5.4673C25.961,21.2266,13.369,33.1632,8.1168,30.7843c-3.5532-1.61,2.7909-7.4675-.1189-12.82a.2323.2323,0,0,0-.3874-.0258c-1.4813,1.9984-2.9293,4.3968-4.9019,3.32-.8812-.4812-1.6744-2.0178-2.2858-2.99A.23.23,0,0,0,0,18.4,24.26,24.26,0,0,0,6.0983,33.2053c4.5289,5.4189,12.465,11.7291,25.2885,13.0059,5.5522.5529,18.7217-1.1976,23.9833-10.6647a13.2978,13.2978,0,0,0-1.2693.63,14.7716,14.7716,0,0,1-5.9875,1.9915c-.1831.0169-.3649.0245-.5466.0245A10.5707,10.5707,0,0,1,41.598,35.8a1.1184,1.1184,0,1,1,.8549-1.0851c0,.0183-.0044.0353-.0057.0535C43.53,35.5955,45.7847,37.0555,47.9919,36.84ZM31.2094,41.795a20.3764,20.3764,0,0,1-4.7961.8712c-1.0832.0006-1.5335-.307-1.748-.768-.5643-1.2134,1.4687-2.9677,3.272-4.2263a.6668.6668,0,1,1,.763,1.0938,10.991,10.991,0,0,0-2.7544,2.5318c.3523.0761,1.4964.1245,4.9176-.7913a.6672.6672,0,0,1,.3459,1.2888Zm15.45-16.2541a2.5468,2.5468,0,0,1,2.4726,2.4538,1.6639,1.6639,0,1,0-1.4731,2.4317,1.7278,1.7278,0,0,0,.3088-.0308,2.37,2.37,0,0,1-1.3083.4025,2.6324,2.6324,0,0,1,0-5.2572ZM38.0706,8.4716a1.3336,1.3336,0,0,0,.524,1.8116c.6453.3554,2.0046-.4177,2.8292.7346.4284.5989-.8963-2.7147-1.5417-3.0708A1.3328,1.3328,0,0,0,38.0706,8.4716Zm6.7939.1428c-1.6619.9743-1.97,5.0031-1.5417,4.4043a7.584,7.584,0,0,1,2.8292-2.0682,1.3337,1.3337,0,0,0-1.2875-2.3361Zm-1.6858-4.442c-.85.9831-.2679,3.5325-.1157,3.0651a5.4212,5.4212,0,0,1,1.3687-1.926.8483.8483,0,1,0-1.253-1.1391Z" style="fill:#e12229"/><path d="M2.973,54.2312h8.3535v1.0034H4.1517v6.7232h6.5727v1.0034H4.1517v8.1777H2.973Z" style="fill:#808285"/><path d="M15.7405,54.2312V71.1389H14.5618V54.2312Z" style="fill:#808285"/><path d="M19.8548,54.4573a27.8016,27.8016,0,0,1,4.3145-.3516c3.0854,0,5.4184.8281,6.8232,2.3081A7.9972,7.9972,0,0,1,33.1,62.2585,9.312,9.312,0,0,1,30.8924,68.68c-1.5054,1.6309-4.0391,2.584-7.3,2.584a36.0469,36.0469,0,0,1-3.7378-.15Zm1.1787,15.6782a21.3432,21.3432,0,0,0,2.7344.1255c5.4184,0,8.1025-3.0352,8.1025-7.9273.0254-4.29-2.3579-7.2246-7.7763-7.2246a17.94,17.94,0,0,0-3.0606.251Z" style="fill:#808285"/><path d="M36.0843,54.2312H37.263V70.1355h7.5508v1.0034h-8.73Z" style="fill:#808285"/><path d="M55.2742,62.7351H48.5008v7.4H56.077v1.0034H47.3221V54.2312h8.3535v1.0034H48.5008v6.4971h6.7734Z" style="fill:#808285"/><line x1="68.2129" x2="68.2129" y2="70.6111" style="fill:none;stroke:#d1d3d4;stroke-miterlimit:10;stroke-width:0.25px"/></g></g></svg> \ No newline at end of file diff --git a/fidle/img/ender.svg b/fidle/img/ender.svg new file mode 100644 index 0000000..37a20d1 --- /dev/null +++ b/fidle/img/ender.svg @@ -0,0 +1,30 @@ +<?xml version="1.0" encoding="UTF-8"?> +<svg id="b" data-name="Calque 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 752.58 36.04"> + <g 
id="c" data-name="Iconographie"> + <g> + <g> + <g> + <g> + <path d="m17.27,29.6c-1.91.52-2.55.5-2.75.45.14-.29.82-.94,1.54-1.44.17-.12.21-.36.09-.53-.12-.17-.35-.21-.52-.09-1.01.72-2.15,1.72-1.83,2.41.12.26.37.44.98.44.55,0,1.39-.14,2.68-.5.2-.05.32-.26.26-.47-.05-.2-.26-.32-.46-.27Z" style="fill: none;"/> + <path d="m21.59,12.35c.36.2,1.12-.24,1.58.42.24.34-.5-1.55-.86-1.75s-.81-.07-1.01.3c-.2.37-.07.83.29,1.03Z" style="fill: #e12229;"/> + <path d="m20.83,25.24c-.15-.18-.13-.46.05-.62.18-.16.45-.14.61.05,2.27,2.71,5.94,3.43,9.6,1.89.37-1.04.56-2.13.58-3.2,0-2.9-1.38-5.59-3.73-7.2-1.09-.75-3.6-1.75-6.9-1.19-2.42.41-4.94,2.28-5.92,3.12-.59.51-7.64,7.32-10.58,5.96-1.99-.92,1.56-4.26-.07-7.32-.04-.08-.16-.09-.22-.01-.83,1.14-1.64,2.51-2.74,1.89-.49-.27-.94-1.15-1.28-1.71-.07-.11-.24-.06-.24.08.13,3.84,1.95,6.66,3.41,8.45,2.53,3.09,6.97,6.69,14.15,7.42,7.39.68,11.33-1.85,13.04-5.17-1.11.38-2.22.56-3.29.56-2.53,0-4.84-1.03-6.49-3Zm-3.37,5.09c-1.29.35-2.13.5-2.68.5-.61,0-.86-.18-.98-.44-.32-.69.82-1.69,1.83-2.41.17-.12.4-.08.52.09.12.17.08.41-.09.53-.72.51-1.4,1.15-1.54,1.44.2.04.84.07,2.75-.45.2-.05.4.07.46.27.05.2-.06.41-.26.47Z" style="fill: #e12229;"/> + <path d="m24.24,13.91c.46-.66,1.22-.98,1.58-1.18s.49-.67.29-1.03c-.2-.37-.66-.51-1.01-.3-.93.56-1.1,2.86-.86,2.51Z" style="fill: #e12229;"/> + <path d="m24.09,10.61c.16-.51.58-.89.77-1.1.19-.21.18-.53-.01-.71-.19-.18-.51-.16-.69.05-.48.56-.15,2.02-.06,1.75Z" style="fill: #e12229;"/> + </g> + <g> + <ellipse cx="26.1" cy="22.56" rx="1.38" ry="1.52" style="fill: #fff;"/> + <ellipse cx="26.66" cy="22.89" rx=".93" ry=".95" style="fill: #e12229;"/> + </g> + </g> + <g> + <path d="m38.15,17.13h6.81v.82h-5.83v5.48h5.36v.82h-5.36v6.67h-.98v-13.79Z" style="fill: #808184;"/> + <path d="m48.59,17.13v13.79h-.98v-13.79h.98Z" style="fill: #808184;"/> + <path d="m51.92,17.33c1.06-.18,2.23-.31,3.54-.31,2.52,0,4.42.67,5.57,1.88,1.1,1.15,1.72,2.7,1.72,4.79s-.55,3.85-1.8,5.22c-1.21,1.33-3.29,2.11-5.95,2.11-1.06,0-2.07-.02-3.07-.12v-13.57Zm.98,12.77c.55.08,1.35.1,2.23.1,4.42,0,6.61-2.48,6.61-6.47.04-3.5-1.9-5.89-6.34-5.89-1.06,0-1.9.1-2.5.22v12.03Z" style="fill: #808184;"/> + <path d="m65.17,17.13h.98v12.97h6.14v.82h-7.12v-13.79Z" style="fill: #808184;"/> + <path d="m80.83,24.07h-5.5v6.04h6.16v.82h-7.14v-13.79h6.81v.82h-5.83v5.3h5.5v.82Z" style="fill: #808184;"/> + </g> + </g> + <line y1=".12" x2="752.58" y2=".12" style="fill: none; stroke: #e6e7e7; stroke-miterlimit: 10; stroke-width: .25px;"/> + </g> + </g> +</svg> \ No newline at end of file diff --git a/fidle/img/header.svg b/fidle/img/header.svg new file mode 100644 index 0000000..6bb8375 --- /dev/null +++ b/fidle/img/header.svg @@ -0,0 +1,125 @@ +<?xml version="1.0" encoding="UTF-8"?> +<svg id="b" data-name="Calque 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 763.76 92.03"> + <g id="c" data-name="Iconographie"> + <g> + <g> + <g> + <g> + <path d="m30.06,36.41c-3.33.91-4.45.86-4.79.79.25-.51,1.44-1.63,2.68-2.52.29-.21.37-.62.16-.92-.2-.3-.61-.37-.9-.16-1.76,1.25-3.74,2.99-3.19,4.2.21.46.65.76,1.7.76.96,0,2.42-.25,4.67-.87.35-.09.55-.46.46-.81-.09-.35-.45-.56-.8-.47Z" style="fill: none;"/> + <path d="m37.59,6.38c.63.35,1.95-.41,2.76.73.42.59-.87-2.7-1.5-3.05s-1.42-.12-1.76.52c-.35.64-.12,1.45.51,1.8Z" style="fill: #e12229;"/> + <path 
d="m36.26,28.82c-.27-.32-.23-.81.08-1.08.32-.28.79-.24,1.06.09,3.95,4.72,10.35,5.98,16.72,3.29.65-1.82.97-3.72,1.01-5.58,0-5.05-2.4-9.72-6.49-12.53-1.9-1.3-6.27-3.04-12.02-2.07-4.22.71-8.6,3.97-10.31,5.43-1.03.88-13.3,12.74-18.41,10.38-3.46-1.6,2.72-7.42-.12-12.74-.08-.14-.28-.16-.38-.03-1.44,1.99-2.85,4.37-4.77,3.3-.86-.48-1.63-2-2.23-2.97-.12-.19-.42-.1-.41.13.22,6.68,3.39,11.6,5.94,14.71,4.41,5.38,12.14,11.65,24.63,12.92,12.87,1.19,19.73-3.22,22.71-9.01-1.94.65-3.87.98-5.73.98-4.4,0-8.42-1.79-11.29-5.23Zm-5.86,8.87c-2.25.62-3.72.87-4.67.87-1.05,0-1.49-.31-1.7-.76-.55-1.21,1.43-2.95,3.19-4.2.3-.21.7-.13.9.16.2.3.13.71-.16.92-1.25.89-2.43,2.01-2.68,2.52.34.08,1.46.12,4.79-.79.35-.1.7.11.8.47.09.35-.11.72-.46.81Z" style="fill: #e12229;"/> + <path d="m42.2,9.1c.8-1.15,2.13-1.7,2.76-2.05s.86-1.16.51-1.8c-.35-.64-1.15-.89-1.76-.52-1.62.97-1.92,4.97-1.5,4.38Z" style="fill: #e12229;"/> + <path d="m41.94,3.35c.29-.89,1.01-1.55,1.33-1.91.33-.37.32-.91-.02-1.23-.34-.31-.88-.28-1.2.1-.83.98-.26,3.51-.11,3.05Z" style="fill: #e12229;"/> + </g> + <g> + <ellipse cx="45.44" cy="24.15" rx="2.4" ry="2.64" style="fill: #fff;"/> + <ellipse cx="46.41" cy="24.74" rx="1.62" ry="1.65" style="fill: #e12229;"/> + </g> + </g> + <g> + <path d="m2.87,49.65h8.13v.98h-6.96v6.55h6.4v.98h-6.4v7.96h-1.17v-16.47Z" style="fill: #808184;"/> + <path d="m15.33,49.65v16.47h-1.17v-16.47h1.17Z" style="fill: #808184;"/> + <path d="m19.31,49.89c1.27-.22,2.66-.37,4.23-.37,3,0,5.28.81,6.64,2.25,1.32,1.37,2.05,3.22,2.05,5.72s-.66,4.59-2.15,6.23c-1.44,1.59-3.93,2.52-7.11,2.52-1.27,0-2.47-.02-3.66-.15v-16.2Zm1.17,15.24c.66.1,1.61.12,2.66.12,5.28,0,7.89-2.96,7.89-7.72.05-4.18-2.27-7.04-7.57-7.04-1.27,0-2.27.12-2.98.27v14.36Z" style="fill: #808184;"/> + <path d="m35.12,49.65h1.17v15.49h7.33v.98h-8.5v-16.47Z" style="fill: #808184;"/> + <path d="m53.81,57.93h-6.57v7.21h7.35v.98h-8.53v-16.47h8.13v.98h-6.96v6.33h6.57v.98Z" style="fill: #808184;"/> + </g> + </g> + <g> + <path d="m93.35,11.39h13.99v3h-10.44v9.64h9.68v2.96h-9.68v12.89h-3.55V11.39Z"/> + <path d="m118.33,40.3c-4.99,0-8.88-3.8-8.88-10.52,0-7.06,4.14-10.78,9.17-10.78s8.88,3.97,8.88,10.52c0,7.69-4.78,10.78-9.13,10.78h-.04Zm.17-2.71c3.34,0,5.37-3.76,5.37-7.99,0-3.38-1.39-7.9-5.37-7.9s-5.41,4.31-5.41,7.99c0,4.14,1.99,7.9,5.37,7.9h.04Z"/> + <path d="m131.85,25.47c0-2.2-.08-4.27-.17-6.04h3.13l.17,3.8h.13c.85-2.54,2.96-4.23,5.24-4.23.38,0,.63.04.93.08v3.47c-.3-.08-.63-.08-1.06-.08-2.45,0-4.23,1.94-4.69,4.61-.08.51-.13,1.14-.13,1.78v11.03h-3.55v-14.41Z"/> + <path d="m144.66,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.17,3.08h.08c1.06-1.86,2.87-3.51,5.92-3.51,2.49,0,4.48,1.48,5.33,3.8h.08c.59-1.1,1.4-1.99,2.2-2.58,1.1-.8,2.32-1.23,3.97-1.23,2.75,0,6.21,1.86,6.21,8.37v12.51h-3.51v-11.92c0-3.59-1.14-6-3.97-6-1.9,0-3.38,1.44-3.97,3.25-.17.55-.25,1.18-.25,1.86v12.81h-3.51v-12.6c0-2.96-1.14-5.33-3.8-5.33-2.03,0-3.64,1.65-4.14,3.59-.17.55-.25,1.18-.25,1.82v12.51h-3.51v-15Z"/> + <path d="m190.51,35.06c0,1.69.04,3.42.3,4.82h-3.21l-.3-2.45h-.13c-1.06,1.56-3,2.87-5.62,2.87-3.68,0-5.79-2.75-5.79-5.88,0-4.99,4.27-7.57,11.2-7.52v-.51c0-1.99-.55-4.73-4.23-4.69-1.73,0-3.47.51-4.73,1.39l-.8-2.45c1.44-.93,3.72-1.65,6.13-1.65,5.58,0,7.19,3.8,7.19,8.07v7.99Zm-3.47-5.66c-3.34-.04-7.73.55-7.73,4.65,0,2.49,1.52,3.55,3.17,3.55,2.32,0,3.89-1.48,4.4-3.3.13-.38.17-.8.17-1.18v-3.72Z"/> + <path d="m200.15,14.56v4.86h4.9v2.71h-4.9v11.37c0,2.58.8,3.8,2.62,3.8.85,0,1.4-.08,1.9-.21l.13,2.71c-.68.25-1.69.47-3.04.47-1.56,0-2.92-.51-3.76-1.48-.93-1.06-1.35-2.75-1.35-5.28v-11.37h-2.96v-2.71h2.96v-3.68l3.51-1.18Z"/> + <path 
d="m210.59,16.04c-1.27,0-2.16-.97-2.16-2.24s.93-2.28,2.2-2.28,2.2.97,2.2,2.28-.85,2.24-2.2,2.24h-.04Zm-1.69,23.84v-20.46h3.55v20.46h-3.55Z"/> + <path d="m225.67,40.3c-4.99,0-8.88-3.8-8.88-10.52,0-7.06,4.14-10.78,9.17-10.78s8.88,3.97,8.88,10.52c0,7.69-4.78,10.78-9.13,10.78h-.04Zm.17-2.71c3.34,0,5.37-3.76,5.37-7.99,0-3.38-1.4-7.9-5.37-7.9s-5.41,4.31-5.41,7.99c0,4.14,1.99,7.9,5.37,7.9h.04Z"/> + <path d="m239.24,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.21,3.08h.08c.97-1.82,3.13-3.51,6.13-3.51,2.79,0,6.51,1.82,6.51,8.2v12.68h-3.55v-12.21c0-3.04-1.01-5.71-4.14-5.71-2.11,0-3.85,1.56-4.44,3.68-.13.46-.21,1.1-.21,1.73v12.51h-3.55v-15Z"/> + <path d="m272.76,11.39v28.49h-3.55V11.39h3.55Z"/> + <path d="m278.97,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.21,3.08h.08c.97-1.82,3.13-3.51,6.13-3.51,2.79,0,6.51,1.82,6.51,8.2v12.68h-3.55v-12.21c0-3.04-1.01-5.71-4.14-5.71-2.11,0-3.85,1.56-4.44,3.68-.13.46-.21,1.1-.21,1.73v12.51h-3.55v-15Z"/> + <path d="m304.62,14.56v4.86h4.9v2.71h-4.9v11.37c0,2.58.8,3.8,2.62,3.8.85,0,1.4-.08,1.9-.21l.13,2.71c-.68.25-1.69.47-3.04.47-1.56,0-2.92-.51-3.76-1.48-.93-1.06-1.35-2.75-1.35-5.28v-11.37h-2.96v-2.71h2.96v-3.68l3.51-1.18Z"/> + <path d="m313.33,25.47c0-2.2-.08-4.27-.17-6.04h3.13l.17,3.8h.13c.85-2.54,2.96-4.23,5.24-4.23.38,0,.63.04.93.08v3.47c-.3-.08-.63-.08-1.06-.08-2.45,0-4.23,1.94-4.69,4.61-.08.51-.13,1.14-.13,1.78v11.03h-3.55v-14.41Z"/> + <path d="m333.23,40.3c-4.99,0-8.88-3.8-8.88-10.52,0-7.06,4.14-10.78,9.17-10.78s8.88,3.97,8.88,10.52c0,7.69-4.78,10.78-9.13,10.78h-.04Zm.17-2.71c3.34,0,5.37-3.76,5.37-7.99,0-3.38-1.4-7.9-5.37-7.9s-5.41,4.31-5.41,7.99c0,4.14,1.99,7.9,5.37,7.9h.04Z"/> + <path d="m362.82,10.08v24.64c0,1.69.08,3.8.17,5.16h-3.13l-.21-3.25h-.13c-.97,2.03-3.17,3.68-6.13,3.68-4.52,0-7.95-4.06-7.95-10.44,0-6.93,3.85-10.86,8.28-10.86,2.66,0,4.56,1.23,5.45,2.87h.08v-11.79h3.55Zm-3.55,17.5c0-.46-.04-1.06-.13-1.52-.47-2.28-2.24-4.18-4.69-4.18-3.51,0-5.41,3.51-5.41,7.86s1.78,7.65,5.33,7.65c2.03,0,4.02-1.35,4.73-4.18.13-.42.17-.93.17-1.52v-4.1Z"/> + <path d="m384.12,34.3c0,2.16.08,3.97.17,5.58h-3.13l-.21-2.96h-.08c-.85,1.44-2.83,3.38-6.04,3.38s-6.42-2.07-6.42-8.62v-12.26h3.55v11.67c0,3.68,1.01,6.25,4.02,6.25,2.32,0,3.8-1.78,4.31-3.21.17-.55.3-1.18.3-1.9v-12.81h3.55v14.88Z"/> + <path d="m403.01,39.2c-.89.46-2.75,1.06-5.11,1.06-5.62,0-9.38-4.02-9.38-10.44s3.93-10.82,10.14-10.82c1.86,0,3.59.46,4.48.97l-.8,2.75c-.76-.38-1.99-.85-3.72-.85-4.27,0-6.47,3.63-6.47,7.82,0,4.73,2.66,7.65,6.42,7.65,1.73,0,2.92-.42,3.89-.84l.55,2.71Z"/> + <path d="m411.8,14.56v4.86h4.9v2.71h-4.9v11.37c0,2.58.8,3.8,2.62,3.8.85,0,1.39-.08,1.9-.21l.13,2.71c-.68.25-1.69.47-3.04.47-1.56,0-2.92-.51-3.76-1.48-.93-1.06-1.35-2.75-1.35-5.28v-11.37h-2.96v-2.71h2.96v-3.68l3.51-1.18Z"/> + <path d="m422.24,16.04c-1.27,0-2.16-.97-2.16-2.24s.93-2.28,2.2-2.28,2.2.97,2.2,2.28-.85,2.24-2.2,2.24h-.04Zm-1.69,23.84v-20.46h3.55v20.46h-3.55Z"/> + <path d="m437.33,40.3c-4.99,0-8.88-3.8-8.88-10.52,0-7.06,4.14-10.78,9.17-10.78s8.88,3.97,8.88,10.52c0,7.69-4.78,10.78-9.13,10.78h-.04Zm.17-2.71c3.34,0,5.37-3.76,5.37-7.99,0-3.38-1.39-7.9-5.37-7.9s-5.41,4.31-5.41,7.99c0,4.14,1.99,7.9,5.37,7.9h.04Z"/> + <path d="m450.89,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.21,3.08h.08c.97-1.82,3.13-3.51,6.13-3.51,2.79,0,6.51,1.82,6.51,8.2v12.68h-3.55v-12.21c0-3.04-1.01-5.71-4.14-5.71-2.11,0-3.85,1.56-4.44,3.68-.13.46-.21,1.1-.21,1.73v12.51h-3.55v-15Z"/> + <path 
d="m494,35.06c0,1.69.04,3.42.3,4.82h-3.21l-.3-2.45h-.13c-1.06,1.56-3,2.87-5.62,2.87-3.68,0-5.79-2.75-5.79-5.88,0-4.99,4.27-7.57,11.2-7.52v-.51c0-1.99-.55-4.73-4.23-4.69-1.73,0-3.47.51-4.73,1.39l-.8-2.45c1.44-.93,3.72-1.65,6.13-1.65,5.58,0,7.19,3.8,7.19,8.07v7.99Zm-3.47-5.66c-3.34-.04-7.73.55-7.73,4.65,0,2.49,1.52,3.55,3.17,3.55,2.32,0,3.89-1.48,4.4-3.3.13-.38.17-.8.17-1.18v-3.72Z"/> + <path d="m515.22,34.3c0,2.16.08,3.97.17,5.58h-3.13l-.21-2.96h-.08c-.85,1.44-2.83,3.38-6.04,3.38s-6.42-2.07-6.42-8.62v-12.26h3.55v11.67c0,3.68,1.01,6.25,4.02,6.25,2.32,0,3.8-1.78,4.31-3.21.17-.55.3-1.18.3-1.9v-12.81h3.55v14.88Z"/> + <path d="m529.37,11.77c1.94-.38,4.27-.59,6.76-.59,4.61,0,7.9,1.1,10.1,3.13,2.45,2.2,3.97,5.62,3.97,10.61,0,5.24-1.48,8.92-3.89,11.37-2.45,2.49-6.34,3.8-11.2,3.8-2.24,0-4.14-.13-5.75-.3V11.77Zm3.55,25.28c.8.13,1.9.13,2.96.13,6.59,0,10.61-3.85,10.61-12.13.04-6.89-3.3-11.03-10.1-11.03-1.48,0-2.66.13-3.47.3v22.74Z"/> + <path d="m556.89,30.2c.04,5.41,3.04,7.27,6.34,7.27,2.28,0,3.8-.42,4.99-.97l.59,2.58c-1.23.59-3.34,1.18-6.09,1.18-5.88,0-9.3-4.1-9.3-10.31s3.42-10.95,8.83-10.95c5.92,0,7.61,5.41,7.61,9.43,0,.8-.04,1.35-.13,1.78h-12.85Zm9.55-2.58c.04-2.87-1.1-6-4.52-6s-4.73,3.34-4.99,6h9.51Z"/> + <path d="m576.33,30.2c.04,5.41,3.04,7.27,6.34,7.27,2.28,0,3.8-.42,4.99-.97l.59,2.58c-1.23.59-3.34,1.18-6.09,1.18-5.88,0-9.3-4.1-9.3-10.31s3.42-10.95,8.83-10.95c5.92,0,7.61,5.41,7.61,9.43,0,.8-.04,1.35-.13,1.78h-12.85Zm9.55-2.58c.04-2.87-1.1-6-4.52-6s-4.73,3.34-4.99,6h9.51Z"/> + <path d="m593.61,26.02c0-2.79-.08-4.82-.17-6.59h3.17l.21,3.21h.08c1.39-2.37,3.59-3.63,6.38-3.63,4.56,0,7.86,4.06,7.86,10.4,0,7.44-4.1,10.91-8.41,10.91-2.49,0-4.48-1.18-5.49-2.96h-.08v10.82h-3.55v-22.15Zm3.55,5.79c0,.55.04,1.06.17,1.56.63,2.66,2.66,4.06,4.78,4.06,3.59,0,5.45-3.38,5.45-7.86,0-4.14-1.82-7.65-5.37-7.65-2.2,0-4.27,1.82-4.82,4.31-.13.46-.21,1.01-.21,1.52v4.06Z"/> + <path d="m623.96,11.39h3.55v25.49h10.9v3h-14.46V11.39Z"/> + <path d="m643.65,30.2c.04,5.41,3.04,7.27,6.34,7.27,2.28,0,3.8-.42,4.99-.97l.59,2.58c-1.23.59-3.34,1.18-6.09,1.18-5.88,0-9.3-4.1-9.3-10.31s3.42-10.95,8.83-10.95c5.92,0,7.61,5.41,7.61,9.43,0,.8-.04,1.35-.13,1.78h-12.85Zm9.55-2.58c.04-2.87-1.1-6-4.52-6s-4.73,3.34-4.99,6h9.51Z"/> + <path d="m674.25,35.06c0,1.69.04,3.42.3,4.82h-3.21l-.3-2.45h-.13c-1.06,1.56-3,2.87-5.62,2.87-3.68,0-5.79-2.75-5.79-5.88,0-4.99,4.27-7.57,11.2-7.52v-.51c0-1.99-.55-4.73-4.23-4.69-1.73,0-3.47.51-4.73,1.39l-.8-2.45c1.44-.93,3.72-1.65,6.13-1.65,5.58,0,7.19,3.8,7.19,8.07v7.99Zm-3.47-5.66c-3.34-.04-7.73.55-7.73,4.65,0,2.49,1.52,3.55,3.17,3.55,2.32,0,3.89-1.48,4.4-3.3.13-.38.17-.8.17-1.18v-3.72Z"/> + <path d="m679.83,25.47c0-2.2-.08-4.27-.17-6.04h3.13l.17,3.8h.13c.85-2.54,2.96-4.23,5.24-4.23.38,0,.63.04.93.08v3.47c-.3-.08-.63-.08-1.06-.08-2.45,0-4.23,1.94-4.69,4.61-.08.51-.13,1.14-.13,1.78v11.03h-3.55v-14.41Z"/> + <path d="m692.68,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.21,3.08h.08c.97-1.82,3.13-3.51,6.13-3.51,2.79,0,6.51,1.82,6.51,8.2v12.68h-3.55v-12.21c0-3.04-1.01-5.71-4.14-5.71-2.11,0-3.85,1.56-4.44,3.68-.13.46-.21,1.1-.21,1.73v12.51h-3.55v-15Z"/> + <path d="m715.92,16.04c-1.27,0-2.16-.97-2.16-2.24s.93-2.28,2.2-2.28,2.2.97,2.2,2.28-.85,2.24-2.2,2.24h-.04Zm-1.69,23.84v-20.46h3.55v20.46h-3.55Z"/> + <path d="m723.49,24.87c0-2.2-.08-3.76-.17-5.45h3.13l.21,3.08h.08c.97-1.82,3.13-3.51,6.13-3.51,2.79,0,6.51,1.82,6.51,8.2v12.68h-3.55v-12.21c0-3.04-1.01-5.71-4.14-5.71-2.11,0-3.85,1.56-4.44,3.68-.13.46-.21,1.1-.21,1.73v12.51h-3.55v-15Z"/> + <path 
d="m760.93,37c0,4.95-.89,7.52-2.62,9.21-1.78,1.69-4.35,2.37-6.85,2.37-2.24,0-4.65-.55-6.13-1.48l.89-2.75c1.18.72,3.09,1.39,5.33,1.39,3.47,0,5.88-1.86,5.88-6.85v-2.24h-.08c-.97,1.82-3,3.17-5.71,3.17-4.65,0-7.9-4.27-7.9-10.06,0-7.1,4.18-10.78,8.37-10.78,3.13,0,4.78,1.73,5.62,3.25h.08l.17-2.83h3.13c-.08,1.44-.17,3.17-.17,5.83v11.75Zm-3.55-9.81c0-.59-.04-1.14-.17-1.61-.59-1.94-2.07-3.72-4.56-3.72-3.17,0-5.33,3.04-5.33,7.74,0,4.4,1.94,7.44,5.33,7.44,1.9,0,3.72-1.14,4.48-3.55.21-.63.25-1.31.25-1.94v-4.35Z"/> + </g> + <g> + <path d="m94.48,52.36h.77v5.1h.03c.5-1,1.38-1.52,2.57-1.52,1.73,0,3.04,1.55,3.04,4.1,0,2.9-1.65,4.24-3.19,4.24-1.2,0-1.98-.55-2.5-1.52h-.05l-.05,1.35h-.68c.03-.48.07-1.27.07-1.78v-9.97Zm.77,8.76c0,.18.02.38.07.55.33,1.33,1.28,1.97,2.29,1.97,1.65,0,2.5-1.57,2.5-3.57,0-1.8-.83-3.47-2.47-3.47-1.05,0-2.02.87-2.3,2.05-.05.2-.08.42-.08.67v1.8Z"/> + <path d="m102.48,56.12l1.83,5.12c.22.62.4,1.25.53,1.77h.03c.15-.48.35-1.13.55-1.8l1.72-5.09h.82l-2.02,5.54c-.85,2.44-1.48,4.12-2.52,5.17-.58.57-1.13.85-1.33.93l-.3-.65c.4-.18.9-.5,1.33-.98.38-.43.85-1.1,1.17-1.88.07-.2.08-.28.08-.38,0-.08-.02-.18-.07-.33l-2.65-7.41h.82Z"/> + <path d="m119.48,63.77c-.5.27-1.43.5-2.6.5-2.42,0-4.75-1.63-4.75-5.69,0-3.7,2.13-5.85,5.07-5.85,1.2,0,1.92.25,2.22.42l-.25.67c-.48-.23-1.13-.42-1.98-.42-2.48,0-4.25,1.75-4.25,5.15s1.65,5.04,4.17,5.04c.85,0,1.63-.17,2.15-.45l.23.63Z"/> + <path d="m121.21,64.12v-11.24h.75l3.75,6.39c.8,1.42,1.43,2.6,1.95,3.77l.03-.02c-.12-1.7-.13-2.94-.13-4.74v-5.4h.75v11.24h-.73l-3.74-6.4c-.73-1.28-1.4-2.47-1.95-3.77l-.05.02c.1,1.48.12,2.7.12,4.75v5.4h-.75Z"/> + <path d="m130.85,53.03c.62-.15,1.45-.23,2.15-.23,1.33,0,2.18.3,2.79.88.48.48.77,1.23.77,2.05,0,1.53-.85,2.47-2.08,2.93v.03c.88.27,1.42,1.12,1.68,2.48.35,1.73.57,2.54.77,2.94h-.8c-.17-.33-.4-1.33-.68-2.77-.32-1.62-.98-2.33-2.4-2.38h-1.42v5.15h-.77v-11.09Zm.77,5.32h1.43c1.63,0,2.72-.98,2.72-2.49,0-1.67-1.07-2.43-2.79-2.43-.63,0-1.1.07-1.37.13v4.79Z"/> + <path d="m138.25,62.96c.55.37,1.35.65,2.17.65,1.55,0,2.52-.93,2.52-2.32,0-1.23-.65-1.99-2.05-2.64-1.4-.58-2.65-1.52-2.65-3.1s1.3-2.82,3.07-2.82c.95,0,1.67.27,2,.5l-.3.65c-.28-.2-.93-.48-1.77-.48-1.63,0-2.23,1.12-2.23,2,0,1.28.72,1.88,2.15,2.55,1.67.82,2.55,1.7,2.55,3.27,0,1.7-1.2,3.05-3.34,3.05-.88,0-1.9-.3-2.4-.68l.28-.63Z"/> + <path d="m147.77,64.79l4.74-12.07h.77l-4.75,12.07h-.75Z"/> + <path d="m166.52,58.77c-.1-1.7-.22-3.69-.18-4.94h-.07c-.37,1.23-.77,2.55-1.35,4.2l-2.15,6.09h-.48l-2.02-5.9c-.6-1.75-1.03-3.12-1.33-4.39h-.05c-.02,1.33-.1,3.22-.22,5.09l-.3,5.2h-.77l.75-11.24h.88l2.17,6.27c.48,1.47.85,2.55,1.15,3.7h.05c.27-1.12.62-2.17,1.15-3.69l2.2-6.29h.93l.72,11.24h-.77l-.32-5.35Z"/> + <path d="m170.6,52.88v11.24h-.77v-11.24h.77Z"/> + <path d="m174.39,60.2l-1.23,3.92h-.78l3.64-11.24h.75l3.64,11.24h-.8l-1.25-3.92h-3.95Zm3.74-.65l-1.18-3.59c-.25-.8-.4-1.43-.55-2.15h-.05c-.15.73-.33,1.38-.55,2.13l-1.18,3.6h3.52Z"/> + <path d="m182.84,52.88v11.24h-.77v-11.24h.77Z"/> + <path d="m187.44,64.79l4.74-12.07h.77l-4.75,12.07h-.75Z"/> + <path d="m198.32,52.88v7.05c0,2.69,1.18,3.69,2.64,3.69,1.65,0,2.77-1.1,2.77-3.69v-7.05h.77v6.97c0,3.3-1.67,4.44-3.57,4.44-1.77,0-3.37-1.03-3.37-4.27v-7.14h.77Z"/> + <path d="m214.18,63.71c-.53.22-1.57.53-2.84.53s-2.5-.33-3.52-1.35c-.87-.88-1.47-2.38-1.47-4.29,0-3.57,2.08-5.84,5.29-5.84,1.03,0,1.87.23,2.27.43l-.25.65c-.48-.23-1.13-.42-2.07-.42-2.58,0-4.44,1.77-4.44,5.05s1.75,5.09,4.3,5.09c1,0,1.62-.15,1.95-.32v-4.07h-2.35v-.63h3.12v5.15Z"/> + <path 
d="m217.61,60.2l-1.23,3.92h-.78l3.64-11.24h.75l3.64,11.24h-.8l-1.25-3.92h-3.95Zm3.74-.65l-1.18-3.59c-.25-.8-.4-1.43-.55-2.15h-.05c-.15.73-.33,1.38-.55,2.13l-1.18,3.6h3.52Z"/> + </g> + <line x1="2.28" y1="86.29" x2="576.88" y2="86.29" style="fill: none; stroke: #e6e7e7; stroke-miterlimit: 10; stroke-width: .38px;"/> + <line x1="664.99" y1="86.29" x2="759.38" y2="86.29" style="fill: none; stroke: #e6e7e7; stroke-miterlimit: 10; stroke-width: .38px;"/> + <g> + <g> + <circle cx="752.85" cy="58.46" r="5.84" style="fill: #fff;"/> + <path d="m752.95,52.01c1.8,0,3.32.62,4.57,1.87,1.25,1.24,1.87,2.76,1.87,4.55s-.61,3.3-1.84,4.52c-1.29,1.27-2.83,1.9-4.6,1.9s-3.24-.63-4.52-1.89c-1.26-1.26-1.89-2.77-1.89-4.53s.63-3.27,1.89-4.55c1.25-1.25,2.75-1.87,4.52-1.87Zm.02,1.16c-1.46,0-2.7.52-3.71,1.55-1.05,1.06-1.57,2.3-1.57,3.72s.52,2.66,1.56,3.69c1.04,1.04,2.28,1.56,3.72,1.56s2.68-.52,3.74-1.57c1.01-.98,1.51-2.21,1.51-3.68s-.51-2.71-1.54-3.72c-1.02-1.03-2.26-1.55-3.72-1.55Zm-2.87,4.36c.12-.79.44-1.4.95-1.83.51-.43,1.14-.65,1.87-.65,1.01,0,1.81.33,2.41.97.6.65.89,1.48.89,2.5s-.31,1.81-.93,2.46c-.62.65-1.42.98-2.41.98-.73,0-1.35-.22-1.88-.65-.53-.44-.84-1.06-.95-1.86h1.62c.04.78.51,1.17,1.41,1.17.45,0,.81-.2,1.09-.58.28-.39.41-.91.41-1.56s-.13-1.2-.38-1.55c-.25-.36-.62-.53-1.09-.53-.86,0-1.34.38-1.45,1.14h.47l-1.27,1.27-1.27-1.27h.5Z"/> + </g> + <g> + <path d="m713.34,58.43c0,.99-.27,1.99-.79,2.86-.5.87-1.24,1.6-2.13,2.11-.92.51-1.91.77-2.95.77s-2.05-.27-2.95-.77c-.89-.51-1.63-1.24-2.15-2.11-.52-.87-.77-1.86-.77-2.86s.25-2.01.77-2.88c.52-.87,1.26-1.6,2.15-2.11.89-.51,1.91-.77,2.95-.77s2.03.27,2.95.77c.89.51,1.63,1.24,2.13,2.11.52.87.79,1.86.79,2.88h0Z" style="fill: #fff; fill-rule: evenodd;"/> + <path d="m707.4,52.05c1.83,0,3.37.63,4.65,1.86.62.61,1.09,1.28,1.41,2.06.3.77.47,1.6.47,2.47s-.15,1.67-.47,2.45c-.32.75-.79,1.43-1.39,2.01-.64.63-1.36,1.09-2.18,1.43-.79.31-1.63.48-2.5.48s-1.68-.17-2.48-.48c-.79-.31-1.49-.8-2.13-1.4-.62-.61-1.09-1.31-1.41-2.06-.32-.77-.5-1.57-.5-2.42s.17-1.67.5-2.45.82-1.48,1.44-2.08c1.24-1.24,2.77-1.86,4.58-1.86h0Zm.02,1.16c-1.49,0-2.75.51-3.76,1.53-.52.51-.92,1.07-1.19,1.7-.27.65-.4,1.31-.4,2.01s.12,1.33.4,1.96c.27.63.67,1.21,1.19,1.7.52.51,1.09.87,1.73,1.14.64.27,1.31.39,2.03.39s1.39-.12,2.03-.39c.67-.27,1.24-.65,1.78-1.14,1.01-.99,1.51-2.2,1.51-3.66,0-.73-.12-1.38-.4-2.01-.25-.63-.64-1.19-1.14-1.7-1.06-1.02-2.3-1.53-3.79-1.53h0Zm-.07,4.17l-.89.44c-.07-.19-.2-.31-.32-.39-.15-.07-.27-.12-.4-.12-.57,0-.87.36-.87,1.14,0,.34.07.61.22.8.15.22.37.31.64.31.4,0,.64-.17.82-.53l.79.39c-.17.31-.4.56-.72.73-.3.19-.62.27-.99.27-.59,0-1.06-.17-1.41-.51-.35-.36-.54-.85-.54-1.45s.2-1.09.54-1.45c.37-.36.82-.53,1.39-.53.79,0,1.39.29,1.73.92h0Zm3.76,0l-.87.44c-.1-.19-.22-.31-.35-.39-.15-.07-.27-.12-.4-.12-.57,0-.87.36-.87,1.14,0,.34.07.61.22.8.15.22.37.31.64.31.37,0,.64-.17.82-.53l.82.39c-.2.31-.42.56-.74.73-.3.19-.62.27-.99.27-.57,0-1.04-.17-1.41-.51-.35-.36-.52-.85-.52-1.45s.17-1.09.54-1.45c.35-.36.82-.53,1.36-.53.82,0,1.39.29,1.73.92h0Z" style="fill-rule: evenodd;"/> + </g> + <g> + <path d="m728.39,58.53c0,.99-.25,1.96-.77,2.81-.5.87-1.24,1.57-2.1,2.08-.89.48-1.88.75-2.9.75s-2.01-.27-2.87-.75c-.89-.51-1.61-1.21-2.13-2.08-.5-.85-.77-1.82-.77-2.81s.27-1.96.77-2.81c.52-.87,1.24-1.57,2.13-2.08.87-.48,1.86-.75,2.87-.75s2.01.27,2.9.75c.87.51,1.61,1.21,2.1,2.08.52.85.77,1.82.77,2.81h0Z" style="fill: #fff; fill-rule: evenodd;"/> + <path 
d="m722.6,52.05c1.83,0,3.37.63,4.63,1.84,1.26,1.24,1.91,2.76,1.91,4.55s-.62,3.27-1.88,4.46c-1.31,1.28-2.87,1.91-4.65,1.91s-3.32-.63-4.6-1.89c-1.29-1.24-1.93-2.74-1.93-4.48s.64-3.27,1.93-4.55c1.26-1.21,2.8-1.84,4.6-1.84h0Zm0,1.16c-1.49,0-2.72.51-3.76,1.53-1.06,1.07-1.58,2.3-1.58,3.71s.52,2.62,1.58,3.66c1.06,1.04,2.3,1.55,3.76,1.55s2.72-.53,3.81-1.57c1.04-.94,1.53-2.18,1.53-3.66s-.52-2.66-1.56-3.68c-1.04-1.02-2.3-1.53-3.79-1.53h0Zm1.76,3.63v2.62h-.74v3.1h-2.03v-3.1h-.74v-2.62c0-.12.05-.22.12-.29s.17-.12.3-.12h2.67c.12,0,.22.05.3.12s.12.17.12.29h0Zm-2.67-1.65c0-.58.3-.9.92-.9s.92.31.92.9-.32.9-.92.9-.92-.29-.92-.9h0Z" style="fill-rule: evenodd;"/> + </g> + <g> + <path d="m743.57,58.41c0,.99-.27,1.99-.79,2.88-.52.87-1.26,1.6-2.18,2.11-.89.51-1.91.77-2.95.77s-2.05-.27-2.95-.77-1.63-1.24-2.15-2.11c-.52-.9-.79-1.89-.79-2.88s.27-2.01.79-2.88c.52-.87,1.26-1.6,2.15-2.11.89-.51,1.91-.77,2.95-.77s2.05.27,2.95.77c.92.51,1.66,1.24,2.18,2.11.52.87.79,1.86.79,2.88h0Z" style="fill: #fff; fill-rule: evenodd;"/> + <path d="m742.41,53.89c-1.26-1.21-2.8-1.84-4.63-1.84s-3.34.63-4.6,1.84c-1.29,1.28-1.93,2.78-1.93,4.55s.64,3.22,1.93,4.48,2.82,1.89,4.6,1.89,3.34-.63,4.68-1.89c1.24-1.21,1.86-2.71,1.86-4.48s-.64-3.32-1.91-4.55h0Zm-.82,8.18c-1.06,1.04-2.35,1.57-3.81,1.57s-2.72-.53-3.76-1.55c-1.06-1.04-1.58-2.25-1.58-3.68,0-.58.1-1.16.27-1.7l1.73.75h-.12v.77h.62c0,.1-.02.19-.02.31v.17h-.59v.77h.72c.1.56.32,1.02.62,1.38.59.8,1.58,1.21,2.65,1.21.72,0,1.34-.19,1.71-.39l-.25-1.19c-.25.1-.74.27-1.26.27s-1.04-.17-1.41-.53c-.15-.19-.27-.44-.35-.75h2.38l3.37,1.45c-.22.41-.52.77-.89,1.11h0Zm-4.23-3.34h-.02.02Zm2.03-.48h.1v-.77h-1.86l-.74-.31c.05-.15.12-.27.22-.36.35-.41.82-.58,1.34-.58s.94.15,1.21.24l.32-1.21c-.4-.17-.99-.31-1.66-.31-1.01,0-1.91.41-2.52,1.09-.12.15-.25.34-.37.51l-2.13-.92c.2-.31.45-.61.72-.87,1.04-1.02,2.28-1.55,3.76-1.55s2.75.53,3.79,1.55c1.04.99,1.56,2.23,1.56,3.71,0,.46-.05.92-.15,1.36l-3.59-1.55h0Z" style="fill-rule: evenodd;"/> + </g> + </g> + <g> + <path d="m582.41,80.65h.55v3.58h.02c.15-.27.39-.55.69-.73.29-.18.62-.29,1.04-.29.54,0,1.76.3,1.76,2.22v3.62h-.55v-3.52c0-.96-.35-1.85-1.37-1.85-.73,0-1.32.52-1.54,1.25-.05.16-.06.31-.06.52v3.59h-.55v-8.4Z" style="fill: #9c9b9b;"/> + <path d="m588.92,82.08v1.25h1.47v.45h-1.47v3.77c0,.75.24,1.13.77,1.13.25,0,.43-.02.55-.07l.07.43c-.17.07-.39.12-.7.12-.39,0-.71-.12-.92-.38-.24-.29-.32-.73-.32-1.36v-3.63h-.89v-.45h.89v-1.02l.55-.23Z" style="fill: #9c9b9b;"/> + <path d="m592.27,82.08v1.25h1.47v.45h-1.47v3.77c0,.75.24,1.13.77,1.13.25,0,.43-.02.55-.07l.07.43c-.17.07-.39.12-.7.12-.39,0-.71-.12-.92-.38-.24-.29-.32-.73-.32-1.36v-3.63h-.89v-.45h.89v-1.02l.55-.23Z" style="fill: #9c9b9b;"/> + <path d="m594.88,85.15c0-.8-.02-1.33-.05-1.81h.52l.04.99h.02c.39-.74,1.02-1.11,1.85-1.11,1.28,0,2.19,1.11,2.19,2.91,0,2.09-1.11,3.05-2.34,3.05-.75,0-1.35-.37-1.67-.95h-.02v3.17h-.55v-6.25Zm.55,1.7c0,.16.01.31.05.45.23.95.92,1.41,1.62,1.41,1.19,0,1.8-1.11,1.8-2.56,0-1.27-.6-2.47-1.76-2.47-.75,0-1.45.62-1.64,1.48-.02.15-.06.32-.06.48v1.21Z" style="fill: #9c9b9b;"/> + <path d="m600.58,88.31c.26.18.68.37,1.16.37.81,0,1.23-.46,1.23-1.07s-.31-.94-1.07-1.3c-.85-.39-1.37-.87-1.37-1.58,0-.83.66-1.51,1.66-1.51.5,0,.91.17,1.17.35l-.23.45c-.2-.14-.52-.32-1.01-.32-.68,0-1.04.45-1.04.95,0,.56.33.81,1.05,1.17.81.37,1.39.83,1.39,1.7,0,1.02-.75,1.64-1.81,1.64-.5,0-.99-.16-1.33-.38l.21-.46Z" style="fill: #9c9b9b;"/> + <path d="m605.07,84.68c-.26,0-.45-.23-.45-.51s.19-.52.46-.52.46.23.46.52-.18.51-.46.51h-.01Zm0,4.49c-.26,0-.45-.23-.45-.51s.19-.52.46-.52.46.23.46.52-.18.51-.46.51h-.01Z" style="fill: 
#9c9b9b;"/> + <path d="m606.04,89.53l3.38-8.63h.55l-3.4,8.63h-.54Z" style="fill: #9c9b9b;"/> + <path d="m609.97,89.53l3.38-8.63h.55l-3.4,8.63h-.54Z" style="fill: #9c9b9b;"/> + <path d="m614.86,89.05v-5.27h-.79v-.45h.79v-.38c0-1.39.63-2.42,2.07-2.42.48,0,.98.18,1.2.37l-.24.43c-.21-.15-.62-.32-1.06-.32-1.13,0-1.43.88-1.43,1.99v.33h2.98v5.72h-.55v-5.27h-2.43v5.27h-.55Z" style="fill: #9c9b9b;"/> + <path d="m624.24,80.65v7.11c0,.38.02.94.05,1.29h-.49l-.04-.99h-.04c-.26.57-.86,1.11-1.79,1.11-1.23,0-2.2-1.08-2.2-2.9,0-1.96,1.06-3.06,2.3-3.06.8,0,1.37.41,1.63.93h.02v-3.49h.55Zm-.55,4.91c0-.14-.01-.33-.05-.49-.14-.76-.76-1.39-1.57-1.39-1.13,0-1.78,1.13-1.78,2.55,0,1.29.55,2.48,1.74,2.48.67,0,1.36-.43,1.61-1.43.04-.14.05-.29.05-.45v-1.26Z" style="fill: #9c9b9b;"/> + <path d="m626.01,80.65h.55v8.4h-.55v-8.4Z" style="fill: #9c9b9b;"/> + <path d="m628.51,86.09c0,1.98.94,2.6,1.95,2.6.7,0,1.1-.14,1.38-.3l.13.43c-.24.14-.77.35-1.58.35-1.54,0-2.43-1.19-2.43-2.88,0-1.88.97-3.06,2.31-3.06,1.66,0,1.99,1.64,1.99,2.53,0,.14,0,.24-.01.34h-3.74Zm3.18-.44c.02-.98-.39-1.97-1.49-1.97s-1.57,1.07-1.67,1.97h3.16Z" style="fill: #9c9b9b;"/> + <path d="m633.68,89.17c-.26,0-.45-.23-.45-.51s.19-.52.46-.52.46.23.46.52-.18.51-.46.51h-.01Z" style="fill: #9c9b9b;"/> + <path d="m639.06,88.83c-.24.13-.76.33-1.45.33-1.49,0-2.49-1.14-2.49-2.92s1.07-3.03,2.67-3.03c.58,0,1.08.17,1.29.31l-.19.46c-.23-.13-.61-.29-1.16-.29-1.38,0-2.05,1.22-2.05,2.53,0,1.5.83,2.46,2.03,2.46.55,0,.93-.16,1.22-.29l.14.43Z" style="fill: #9c9b9b;"/> + <path d="m640.29,84.69c0-.58-.02-.92-.05-1.36h.51l.04.94h.02c.27-.6.92-1.06,1.75-1.06.57,0,1.79.32,1.79,2.25v3.59h-.55v-3.49c0-.96-.32-1.87-1.39-1.87-.7,0-1.32.52-1.51,1.27-.04.14-.06.33-.06.51v3.58h-.55v-4.36Z" style="fill: #9c9b9b;"/> + <path d="m646.09,84.93c0-.52-.02-1.1-.05-1.6h.51l.02,1.08h.02c.23-.66.79-1.2,1.51-1.2.07,0,.13.01.19.01v.55c-.06-.01-.13-.01-.21-.01-.77,0-1.3.69-1.42,1.55-.02.17-.04.35-.04.52v3.22h-.55v-4.12Z" style="fill: #9c9b9b;"/> + <path d="m649.1,88.31c.26.18.68.37,1.16.37.81,0,1.23-.46,1.23-1.07s-.31-.94-1.07-1.3c-.85-.39-1.37-.87-1.37-1.58,0-.83.66-1.51,1.66-1.51.5,0,.91.17,1.17.35l-.23.45c-.2-.14-.52-.32-1.01-.32-.68,0-1.04.45-1.04.95,0,.56.33.81,1.05,1.17.81.37,1.39.83,1.39,1.7,0,1.02-.75,1.64-1.81,1.64-.5,0-.99-.16-1.33-.38l.21-.46Z" style="fill: #9c9b9b;"/> + <path d="m653.45,89.17c-.26,0-.45-.23-.45-.51s.19-.52.46-.52.46.23.46.52-.18.51-.46.51h-.01Z" style="fill: #9c9b9b;"/> + <path d="m655.38,89.05v-5.27h-.79v-.45h.79v-.34c0-.8.14-1.5.57-1.97.35-.36.81-.49,1.19-.49.33,0,.62.08.77.18l-.17.44c-.14-.07-.35-.14-.66-.14-.98,0-1.17.94-1.17,1.97v.36h1.39v.45h-1.39v5.27h-.55Z" style="fill: #9c9b9b;"/> + <path d="m658.18,84.93c0-.52-.02-1.1-.05-1.6h.51l.02,1.08h.02c.23-.66.79-1.2,1.51-1.2.07,0,.13.01.19.01v.55c-.06-.01-.13-.01-.21-.01-.77,0-1.3.69-1.42,1.55-.02.17-.04.35-.04.52v3.22h-.55v-4.12Z" style="fill: #9c9b9b;"/> + </g> + </g> + </g> +</svg> \ No newline at end of file diff --git a/fidle/img/00-Fidle-logo-01-80px.svg b/fidle/img/logo-paysage-80px.svg similarity index 100% rename from fidle/img/00-Fidle-logo-01-80px.svg rename to fidle/img/logo-paysage-80px.svg diff --git a/fidle/img/logo-paysage.svg b/fidle/img/logo-paysage.svg new file mode 100644 index 0000000..33f1cbc --- /dev/null +++ b/fidle/img/logo-paysage.svg @@ -0,0 +1,27 @@ +<?xml version="1.0" encoding="UTF-8"?> +<svg id="b" data-name="Calque 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 220.59 73.39"> + <g id="c" data-name="Iconographie"> + <g> + <g> + <g> + <path 
d="m46.33,56.11c-5.14,1.4-6.85,1.33-7.38,1.21.39-.78,2.21-2.51,4.13-3.88.45-.32.56-.96.25-1.42-.32-.46-.94-.57-1.39-.25-2.71,1.93-5.76,4.61-4.91,6.47.32.71,1,1.18,2.62,1.18,1.47,0,3.73-.39,7.2-1.33.53-.15.85-.71.71-1.25-.14-.55-.69-.87-1.23-.72Z" style="fill: none;"/> + <path d="m57.93,9.83c.97.54,3.01-.64,4.25,1.12.64.92-1.35-4.16-2.31-4.7s-2.19-.18-2.72.8c-.53.99-.18,2.23.79,2.77Z" style="fill: #e12229;"/> + <path d="m55.88,44.42c-.42-.5-.36-1.24.13-1.66.49-.42,1.22-.36,1.63.13,6.09,7.28,15.95,9.21,25.77,5.08,1-2.8,1.49-5.73,1.56-8.6,0-7.79-3.7-14.98-10.01-19.31-2.92-2.01-9.67-4.68-18.52-3.19-6.51,1.1-13.25,6.12-15.89,8.37-1.59,1.36-20.49,19.64-28.37,15.99-5.33-2.46,4.19-11.43-.18-19.63-.12-.22-.43-.24-.58-.04-2.22,3.06-4.4,6.73-7.36,5.08-1.32-.74-2.51-3.09-3.43-4.58-.18-.3-.65-.15-.63.2.34,10.29,5.22,17.87,9.15,22.67,6.8,8.3,18.71,17.96,37.96,19.92,19.84,1.84,30.4-4.97,34.99-13.88-2.99,1.01-5.96,1.51-8.82,1.51-6.78,0-12.97-2.77-17.4-8.06Zm-9.03,13.67c-3.47.95-5.73,1.33-7.2,1.33-1.63,0-2.3-.47-2.62-1.18-.85-1.86,2.2-4.54,4.91-6.47.46-.32,1.08-.21,1.39.25.32.46.2,1.1-.25,1.42-1.92,1.37-3.75,3.09-4.13,3.88.53.12,2.25.19,7.38-1.21.53-.15,1.08.18,1.23.72.14.55-.17,1.11-.71,1.25Z" style="fill: #e12229;"/> + <path d="m65.03,14.02c1.24-1.76,3.28-2.62,4.25-3.17s1.32-1.79.79-2.77c-.53-.99-1.77-1.37-2.72-.8-2.49,1.49-2.96,7.66-2.31,6.74Z" style="fill: #e12229;"/> + <path d="m64.64,5.17c.44-1.38,1.55-2.39,2.05-2.95.5-.56.49-1.41-.03-1.89-.52-.48-1.36-.43-1.85.15-1.28,1.51-.4,5.41-.17,4.69Z" style="fill: #e12229;"/> + </g> + <g> + <ellipse cx="70.02" cy="37.22" rx="3.7" ry="4.08" style="fill: #fff;"/> + <ellipse cx="71.51" cy="38.12" rx="2.5" ry="2.55" style="fill: #e12229;"/> + </g> + </g> + <g> + <path d="m102.35,22.65h18.29v2.2h-15.65v14.72h14.39v2.2h-14.39v17.9h-2.64V22.65Z" style="fill: #808184;"/> + <path d="m130.36,22.65v37.01h-2.64V22.65h2.64Z" style="fill: #808184;"/> + <path d="m139.31,23.2c2.86-.49,5.99-.82,9.5-.82,6.75,0,11.86,1.81,14.94,5.05,2.97,3.08,4.61,7.25,4.61,12.85s-1.48,10.32-4.83,14c-3.24,3.57-8.84,5.66-15.98,5.66-2.86,0-5.55-.05-8.24-.33V23.2Zm2.64,34.27c1.48.22,3.62.27,5.99.27,11.86,0,17.74-6.65,17.74-17.35.11-9.39-5.11-15.82-17.02-15.82-2.86,0-5.11.27-6.7.6v32.29Z" style="fill: #808184;"/> + <path d="m174.84,22.65h2.64v34.82h16.48v2.2h-19.11V22.65Z" style="fill: #808184;"/> + <path d="m216.85,41.27h-14.77v16.2h16.53v2.2h-19.17V22.65h18.29v2.2h-15.65v14.22h14.77v2.2Z" style="fill: #808184;"/> + </g> + </g> + </g> +</svg> \ No newline at end of file diff --git a/fidle/img/title.svg b/fidle/img/title.svg new file mode 100644 index 0000000..40a9f33 --- /dev/null +++ b/fidle/img/title.svg @@ -0,0 +1,71 @@ +<?xml version="1.0" encoding="UTF-8"?> +<svg id="b" data-name="Calque 2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 584.25 138.43"> + <g id="c" data-name="Iconographie"> + <g> + <g> + <path d="m135.03,6.17h7.36v1.48h-5.57v4.54h5.15v1.46h-5.15v6.18h-1.78V6.17Z"/> + <path d="m148.23,20.05c-2.63,0-4.7-1.95-4.7-5.05,0-3.28,2.17-5.21,4.86-5.21s4.72,2.05,4.72,5.03c0,3.65-2.53,5.23-4.86,5.23h-.02Zm.08-1.34c1.7,0,2.98-1.6,2.98-3.83,0-1.66-.83-3.75-2.94-3.75s-3,1.95-3,3.81c0,2.15,1.22,3.77,2.94,3.77h.02Z"/> + <path d="m155.36,13.08c0-1.16-.02-2.15-.08-3.06h1.56l.08,1.92h.06c.45-1.32,1.54-2.15,2.74-2.15.18,0,.32.02.49.04v1.68c-.18-.02-.36-.04-.61-.04-1.26,0-2.15.95-2.39,2.27-.04.26-.06.55-.06.83v5.25h-1.78v-6.75Z"/> + <path 
d="m162.07,12.67c0-1.03-.04-1.84-.08-2.65h1.56l.08,1.58h.06c.55-.93,1.46-1.8,3.1-1.8,1.32,0,2.33.81,2.76,1.96h.04c.3-.57.71-.97,1.11-1.28.59-.45,1.22-.69,2.15-.69,1.32,0,3.24.85,3.24,4.25v5.78h-1.74v-5.55c0-1.91-.71-3.02-2.13-3.02-1.03,0-1.8.75-2.13,1.6-.08.26-.14.59-.14.89v6.08h-1.74v-5.9c0-1.56-.69-2.67-2.05-2.67-1.09,0-1.92.89-2.21,1.78-.1.26-.14.57-.14.87v5.92h-1.74v-7.15Z"/> + <path d="m185.88,17.48c0,.85.04,1.68.14,2.35h-1.6l-.16-1.24h-.06c-.53.77-1.6,1.46-3,1.46-1.99,0-3-1.4-3-2.82,0-2.37,2.11-3.67,5.9-3.65v-.2c0-.81-.22-2.29-2.23-2.27-.93,0-1.88.26-2.57.73l-.41-1.2c.81-.51,2.01-.85,3.24-.85,3.02,0,3.75,2.05,3.75,4.01v3.67Zm-1.74-2.65c-1.95-.04-4.15.3-4.15,2.21,0,1.17.77,1.7,1.66,1.7,1.3,0,2.13-.81,2.41-1.64.06-.18.08-.38.08-.57v-1.7Z"/> + <path d="m190.81,7.67v2.35h2.55v1.36h-2.55v5.27c0,1.22.34,1.91,1.34,1.91.49,0,.77-.04,1.03-.12l.08,1.36c-.34.12-.89.24-1.58.24-.83,0-1.5-.28-1.93-.75-.49-.55-.69-1.42-.69-2.57v-5.33h-1.52v-1.36h1.52v-1.8l1.74-.55Z"/> + <path d="m196.22,8.36c-.65,0-1.07-.51-1.07-1.09,0-.63.45-1.11,1.11-1.11s1.09.49,1.09,1.11c0,.59-.43,1.09-1.11,1.09h-.02Zm-.85,11.47v-9.81h1.78v9.81h-1.78Z"/> + <path d="m204.1,20.05c-2.63,0-4.7-1.95-4.7-5.05,0-3.28,2.17-5.21,4.86-5.21s4.72,2.05,4.72,5.03c0,3.65-2.53,5.23-4.86,5.23h-.02Zm.08-1.34c1.7,0,2.98-1.6,2.98-3.83,0-1.66-.83-3.75-2.94-3.75s-3,1.95-3,3.81c0,2.15,1.22,3.77,2.94,3.77h.02Z"/> + <path d="m211.24,12.67c0-1.03-.02-1.84-.08-2.65h1.58l.1,1.6h.04c.49-.91,1.62-1.82,3.24-1.82,1.36,0,3.47.81,3.47,4.17v5.86h-1.78v-5.65c0-1.58-.59-2.92-2.27-2.92-1.16,0-2.07.83-2.39,1.82-.08.22-.12.55-.12.83v5.92h-1.78v-7.15Z"/> + </g> + <g> + <path d="m139.71,27.58v32.27h-4.21V27.58h4.21Z"/> + <path d="m147.32,42.95c0-2.44-.05-4.36-.19-6.27h3.73l.24,3.78h.1c1.15-2.15,3.83-4.31,7.66-4.31,3.21,0,8.19,1.92,8.19,9.86v13.84h-4.21v-13.36c0-3.73-1.39-6.89-5.36-6.89-2.73,0-4.88,1.96-5.65,4.31-.19.53-.29,1.29-.29,1.96v13.98h-4.21v-16.9Z"/> + <path d="m178.78,31.12v5.55h6.03v3.21h-6.03v12.45c0,2.87.81,4.5,3.16,4.5,1.15,0,1.82-.1,2.44-.29l.19,3.21c-.81.29-2.11.57-3.73.57-1.96,0-3.54-.67-4.55-1.77-1.15-1.29-1.63-3.35-1.63-6.08v-12.59h-3.59v-3.21h3.59v-4.26l4.12-1.29Z"/> + <path d="m189.55,43.9c0-2.73-.05-5.08-.19-7.23h3.69l.19,4.55h.14c1.05-3.11,3.64-5.08,6.46-5.08.43,0,.77.05,1.15.1v3.97c-.43-.05-.86-.1-1.44-.1-2.97,0-5.08,2.25-5.65,5.36-.1.62-.14,1.29-.14,1.96v12.4h-4.21v-15.94Z"/> + <path d="m214.16,60.38c-6.22,0-11.11-4.6-11.11-11.92,0-7.76,5.12-12.31,11.49-12.31s11.16,4.84,11.16,11.87c0,8.62-5.99,12.35-11.49,12.35h-.05Zm.19-3.16c4.02,0,7.04-3.78,7.04-9.05,0-3.93-1.96-8.86-6.94-8.86s-7.09,4.6-7.09,9c0,5.08,2.87,8.91,6.94,8.91h.05Z"/> + <path d="m251.02,25.85v28.01c0,2.06.05,4.41.19,5.99h-3.78l-.19-4.02h-.1c-1.29,2.59-4.07,4.55-7.85,4.55-5.6,0-9.96-4.74-9.96-11.78-.05-7.76,4.79-12.45,10.39-12.45,3.59,0,5.99,1.68,6.99,3.5h.1v-13.79h4.21Zm-4.21,20.25c0-.57-.05-1.25-.19-1.82-.62-2.63-2.92-4.84-6.08-4.84-4.36,0-6.94,3.83-6.94,8.91,0,4.74,2.35,8.62,6.85,8.62,2.83,0,5.41-1.92,6.18-5.08.14-.53.19-1.1.19-1.77v-4.02Z"/> + <path d="m277.4,53.48c0,2.44.05,4.55.19,6.37h-3.73l-.24-3.78h-.1c-1.05,1.87-3.54,4.31-7.66,4.31-3.64,0-8-2.06-8-10.15v-13.55h4.21v12.78c0,4.41,1.39,7.42,5.17,7.42,2.82,0,4.79-1.96,5.55-3.88.24-.58.38-1.34.38-2.16v-14.17h4.21v16.81Z"/> + <path d="m300.91,58.99c-1.1.53-3.54,1.34-6.66,1.34-6.99,0-11.54-4.74-11.54-11.83s4.88-12.35,12.45-12.35c2.49,0,4.69.62,5.84,1.24l-.96,3.21c-1.01-.53-2.59-1.1-4.88-1.1-5.31,0-8.19,3.97-8.19,8.76,0,5.36,3.45,8.67,8.04,8.67,2.39,0,3.97-.57,5.17-1.1l.72,3.16Z"/> + <path 
d="m311.53,31.12v5.55h6.03v3.21h-6.03v12.45c0,2.87.81,4.5,3.16,4.5,1.15,0,1.82-.1,2.44-.29l.19,3.21c-.81.29-2.11.57-3.73.57-1.96,0-3.54-.67-4.55-1.77-1.15-1.29-1.63-3.35-1.63-6.08v-12.59h-3.59v-3.21h3.59v-4.26l4.12-1.29Z"/> + <path d="m324.31,32.75c-1.53,0-2.54-1.2-2.54-2.58,0-1.48,1.05-2.63,2.63-2.63s2.59,1.15,2.59,2.63c0,1.39-1.01,2.58-2.63,2.58h-.05Zm-2.01,27.1v-23.17h4.21v23.17h-4.21Z"/> + <path d="m342.94,60.38c-6.22,0-11.11-4.6-11.11-11.92,0-7.76,5.12-12.31,11.49-12.31s11.16,4.84,11.16,11.87c0,8.62-5.99,12.35-11.49,12.35h-.05Zm.19-3.16c4.02,0,7.04-3.78,7.04-9.05,0-3.93-1.96-8.86-6.94-8.86s-7.09,4.6-7.09,9c0,5.08,2.87,8.91,6.94,8.91h.05Z"/> + <path d="m359.79,42.95c0-2.44-.05-4.36-.19-6.27h3.73l.24,3.78h.1c1.15-2.15,3.83-4.31,7.66-4.31,3.21,0,8.19,1.92,8.19,9.86v13.84h-4.21v-13.36c0-3.73-1.39-6.89-5.36-6.89-2.73,0-4.88,1.96-5.65,4.31-.19.53-.29,1.29-.29,1.96v13.98h-4.21v-16.9Z"/> + </g> + <g> + <path d="m399.85,57.28c0,.85.04,1.68.14,2.35h-1.6l-.16-1.24h-.06c-.53.77-1.6,1.46-3,1.46-1.99,0-3-1.4-3-2.82,0-2.37,2.11-3.67,5.9-3.65v-.2c0-.81-.22-2.29-2.23-2.27-.93,0-1.88.26-2.57.73l-.41-1.2c.81-.51,2.01-.85,3.24-.85,3.02,0,3.75,2.05,3.75,4.01v3.67Zm-1.74-2.65c-1.95-.04-4.15.3-4.15,2.21,0,1.17.77,1.7,1.66,1.7,1.3,0,2.13-.81,2.41-1.64.06-.18.08-.38.08-.57v-1.7Z"/> + <path d="m410.92,56.93c0,1.03.02,1.92.08,2.7h-1.58l-.1-1.6h-.04c-.45.79-1.5,1.82-3.24,1.82-1.54,0-3.38-.87-3.38-4.3v-5.73h1.78v5.41c0,1.86.59,3.14,2.19,3.14,1.2,0,2.03-.83,2.35-1.64.1-.24.16-.57.16-.91v-6h1.78v7.11Z"/> + </g> + <g> + <path d="m135.91,70.66c4-.59,8.67-1.04,13.78-1.04,9.26,0,15.85,2.15,20.22,6.22,4.52,4.07,7.11,9.85,7.11,17.92s-2.59,14.81-7.18,19.41c-4.74,4.67-12.44,7.18-22.15,7.18-4.67,0-8.44-.15-11.78-.59v-49.11Zm6.52,44.22c1.63.3,4,.3,6.52.3,13.78.07,21.26-7.7,21.26-21.18.07-11.78-6.59-19.26-20.22-19.26-3.33,0-5.85.3-7.55.67v39.48Z" style="fill: #e12229;"/> + <path d="m189.09,103.17c.15,8.74,5.7,12.37,12.22,12.37,4.67,0,7.55-.81,9.92-1.85l1.19,4.67c-2.3,1.04-6.3,2.3-12,2.3-10.96,0-17.63-7.33-17.63-18.07s6.44-19.33,16.89-19.33c11.7,0,14.74,10.3,14.74,16.89,0,1.33-.07,2.3-.22,3.04h-25.11Zm19.03-4.67c.07-4.15-1.7-10.59-8.96-10.59-6.67,0-9.48,6.07-10,10.59h18.96Z" style="fill: #e12229;"/> + <path d="m226.2,103.17c.15,8.74,5.7,12.37,12.22,12.37,4.67,0,7.55-.81,9.92-1.85l1.19,4.67c-2.3,1.04-6.3,2.3-12,2.3-10.96,0-17.63-7.33-17.63-18.07s6.44-19.33,16.89-19.33c11.7,0,14.74,10.3,14.74,16.89,0,1.33-.07,2.3-.22,3.04h-25.11Zm19.03-4.67c.07-4.15-1.7-10.59-8.96-10.59-6.67,0-9.48,6.07-10,10.59h18.96Z" style="fill: #e12229;"/> + <path d="m259.6,95.76c0-4.59-.07-8.3-.3-11.7h5.85l.37,6.15h.15c2.59-4.44,6.89-6.96,12.67-6.96,8.74,0,15.26,7.33,15.26,18.22,0,12.89-7.93,19.26-16.29,19.26-4.81,0-8.89-2.07-11.04-5.63h-.15v19.48h-6.52v-38.81Zm6.52,9.55c0,.89.07,1.85.3,2.67,1.11,4.52,5.11,7.63,9.7,7.63,6.89,0,10.89-5.63,10.89-13.85,0-7.18-3.7-13.33-10.59-13.33-4.44,0-8.67,3.11-9.92,8.07-.15.82-.37,1.78-.37,2.59v6.22Z" style="fill: #e12229;"/> + <path d="m317.6,69.99h6.52v44.51h21.33v5.41h-27.85v-49.92Z" style="fill: #e12229;"/> + <path d="m355.08,103.17c.15,8.74,5.7,12.37,12.22,12.37,4.67,0,7.55-.81,9.92-1.85l1.19,4.67c-2.3,1.04-6.3,2.3-12,2.3-10.96,0-17.63-7.33-17.63-18.07s6.44-19.33,16.89-19.33c11.7,0,14.74,10.3,14.74,16.89,0,1.33-.07,2.3-.22,3.04h-25.11Zm19.03-4.67c.07-4.15-1.7-10.59-8.96-10.59-6.67,0-9.48,6.07-10,10.59h18.96Z" style="fill: #e12229;"/> + <path 
d="m413.74,111.32c0,3.11.15,6.15.52,8.59h-5.85l-.59-4.52h-.22c-1.93,2.81-5.85,5.33-10.96,5.33-7.26,0-10.96-5.11-10.96-10.29,0-8.67,7.7-13.41,21.55-13.33v-.74c0-2.96-.81-8.37-8.15-8.3-3.41,0-6.89.96-9.41,2.67l-1.48-4.37c2.96-1.85,7.33-3.11,11.85-3.11,11.04,0,13.7,7.48,13.7,14.67v13.41Zm-6.37-9.7c-7.11-.15-15.18,1.11-15.18,8.07,0,4.3,2.81,6.22,6.07,6.22,4.74,0,7.78-2.96,8.81-6,.22-.67.3-1.41.3-2.07v-6.22Z" style="fill: #e12229;"/> + <path d="m424.18,95.25c0-4.22-.07-7.85-.3-11.18h5.7l.3,7.04h.22c1.63-4.81,5.63-7.85,10-7.85.67,0,1.19.07,1.78.15v6.15c-.67-.07-1.33-.15-2.22-.15-4.59,0-7.85,3.48-8.74,8.3-.15.96-.22,2-.22,3.04v19.18h-6.52v-24.66Z" style="fill: #e12229;"/> + <path d="m448.7,93.76c0-3.78-.07-6.74-.3-9.7h5.78l.37,5.85h.15c1.78-3.33,5.93-6.67,11.85-6.67,4.96,0,12.67,2.96,12.67,15.26v21.41h-6.52v-20.66c0-5.78-2.15-10.67-8.3-10.67-4.22,0-7.55,3.04-8.74,6.67-.3.81-.44,2-.44,3.04v21.63h-6.52v-26.15Z" style="fill: #e12229;"/> + <path d="m492.92,77.99c-2.37,0-3.93-1.85-3.93-4,0-2.3,1.63-4.07,4.07-4.07s4,1.78,4,4.07c0,2.15-1.56,4-4.07,4h-.07Zm-3.11,41.92v-35.85h6.52v35.85h-6.52Z" style="fill: #e12229;"/> + <path d="m507.14,93.76c0-3.78-.07-6.74-.3-9.7h5.78l.37,5.85h.15c1.78-3.33,5.93-6.67,11.85-6.67,4.96,0,12.67,2.96,12.67,15.26v21.41h-6.52v-20.66c0-5.78-2.15-10.67-8.3-10.67-4.22,0-7.55,3.04-8.74,6.67-.3.81-.44,2-.44,3.04v21.63h-6.52v-26.15Z" style="fill: #e12229;"/> + <path d="m578.91,114.65c0,8.3-1.7,13.26-5.11,16.44-3.56,3.19-8.59,4.3-13.11,4.3s-9.04-1.04-11.92-2.96l1.63-5.04c2.37,1.56,6.07,2.89,10.52,2.89,6.67,0,11.55-3.48,11.55-12.59v-3.93h-.15c-1.93,3.33-5.85,6-11.41,6-8.89,0-15.26-7.55-15.26-17.55,0-12.15,7.93-18.96,16.22-18.96,6.22,0,9.55,3.19,11.18,6.22h.15l.22-5.41h5.78c-.22,2.52-.3,5.48-.3,9.78v20.81Zm-6.52-16.52c0-1.04-.07-2-.37-2.89-1.19-3.85-4.3-6.96-9.11-6.96-6.22,0-10.67,5.26-10.67,13.63,0,6.96,3.63,12.89,10.59,12.89,4.07,0,7.7-2.52,9.04-6.74.37-1.04.52-2.3.52-3.48v-6.44Z" style="fill: #e12229;"/> + </g> + <g> + <path d="m4.22,91.6h14.1v1.69H6.26v11.35h11.1v1.69H6.26v13.81h-2.03v-28.55Z" style="fill: #808184;"/> + <path d="m25.83,91.6v28.55h-2.03v-28.55h2.03Z" style="fill: #808184;"/> + <path d="m32.74,92.02c2.2-.38,4.62-.64,7.33-.64,5.21,0,9.15,1.4,11.52,3.9,2.29,2.37,3.56,5.59,3.56,9.91s-1.14,7.96-3.73,10.8c-2.5,2.75-6.82,4.36-12.33,4.36-2.2,0-4.28-.04-6.35-.25v-28.08Zm2.03,26.43c1.14.17,2.8.21,4.62.21,9.15,0,13.68-5.13,13.68-13.39.08-7.24-3.94-12.2-13.13-12.2-2.2,0-3.94.21-5.17.47v24.91Z" style="fill: #808184;"/> + <path d="m60.14,91.6h2.03v26.86h12.71v1.69h-14.74v-28.55Z" style="fill: #808184;"/> + <path d="m92.55,105.96h-11.39v12.5h12.75v1.69h-14.78v-28.55h14.1v1.69h-12.07v10.97h11.39v1.69Z" style="fill: #808184;"/> + </g> + <line x1="114.45" x2="114.45" y2="119.25" style="fill: none; stroke: #cfd1d3; stroke-miterlimit: 10; stroke-width: .27px;"/> + <g> + <g> + <path d="m52.17,64.26c-5.78,1.58-7.72,1.5-8.31,1.36.44-.88,2.49-2.83,4.66-4.37.51-.36.63-1.08.28-1.6-.36-.52-1.06-.65-1.57-.29-3.05,2.17-6.48,5.2-5.53,7.29.36.8,1.12,1.33,2.95,1.32,1.66,0,4.2-.43,8.11-1.5.6-.16.96-.79.8-1.41-.16-.61-.78-.98-1.38-.81Z" style="fill: none;"/> + <path d="m65.24,12.15c1.09.61,3.39-.72,4.78,1.27.72,1.03-1.52-4.68-2.61-5.3s-2.46-.21-3.06.9c-.6,1.11-.2,2.51.89,3.12Z" style="fill: #e12229;"/> + <path 
d="m62.93,51.09c-.47-.56-.4-1.4.15-1.87.55-.48,1.37-.41,1.84.15,6.85,8.19,17.96,10.38,29.02,5.72,1.13-3.15,1.68-6.45,1.76-9.68,0-8.77-4.17-16.87-11.27-21.75-3.29-2.26-10.89-5.27-20.85-3.59-7.33,1.24-14.92,6.89-17.89,9.43-1.79,1.53-23.07,22.11-31.95,18.01-6.01-2.78,4.72-12.88-.2-22.11-.13-.25-.49-.27-.65-.04-2.5,3.45-4.95,7.58-8.29,5.72-1.49-.83-2.83-3.48-3.86-5.16-.21-.34-.73-.17-.71.23.38,11.59,5.88,20.13,10.31,25.53,7.65,9.34,21.07,20.22,42.74,22.43,22.34,2.07,34.24-5.6,39.41-15.63-3.36,1.13-6.71,1.7-9.94,1.7-7.63,0-14.61-3.11-19.6-9.08Zm-10.17,15.39c-3.91,1.07-6.45,1.5-8.11,1.5-1.83,0-2.59-.53-2.95-1.32-.95-2.09,2.48-5.12,5.53-7.29.51-.36,1.21-.23,1.57.29.36.52.23,1.24-.28,1.6-2.16,1.54-4.22,3.48-4.66,4.37.6.13,2.53.21,8.31-1.36.6-.16,1.22.2,1.38.81.16.61-.19,1.25-.8,1.41Z" style="fill: #e12229;"/> + <path d="m73.23,16.86c1.39-1.99,3.69-2.95,4.78-3.57,1.09-.61,1.49-2.01.89-3.12-.6-1.11-1.99-1.55-3.06-.9-2.81,1.68-3.33,8.63-2.61,7.59Z" style="fill: #e12229;"/> + <path d="m72.79,6.9c.5-1.55,1.75-2.69,2.31-3.32.57-.63.55-1.59-.04-2.13-.58-.54-1.53-.48-2.08.17-1.44,1.7-.45,6.09-.2,5.29Z" style="fill: #e12229;"/> + </g> + <g> + <ellipse cx="78.85" cy="42.99" rx="4.17" ry="4.59" style="fill: #fff;"/> + <ellipse cx="80.53" cy="44.01" rx="2.81" ry="2.87" style="fill: #e12229;"/> + </g> + </g> + </g> + </g> +</svg> \ No newline at end of file -- GitLab