From 314e6afbe40b050b63faaac1b07ba5aa00d4ec2b Mon Sep 17 00:00:00 2001 From: Soraya Arias <soraya.arias@inria.fr> Date: Mon, 22 Jan 2024 19:13:44 +0100 Subject: [PATCH] Update url for keras3 + misc url correction --- BHPD.Keras3/01-DNN-Regression.ipynb | 10 +++++----- BHPD.Keras3/02-DNN-Regression-Premium.ipynb | 10 +++++----- BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb | 6 +++--- Embedding.Keras3/01-One-hot-encoding.ipynb | 8 ++++---- Embedding.Keras3/02-Keras-embedding.ipynb | 8 ++++---- Embedding.Keras3/03-Prediction.ipynb | 2 +- Embedding.Keras3/05-LSTM-Keras.ipynb | 12 ++++++------ MNIST.Keras3/01-DNN-MNIST.ipynb | 10 +++++----- MNIST.Keras3/02-CNN-MNIST.ipynb | 10 +++++----- MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb | 8 ++++---- Perceptron/01-Simple-Perceptron.ipynb | 2 +- Wine.Keras3/01-DNN-Wine-Regression.ipynb | 10 +++++----- 12 files changed, 48 insertions(+), 48 deletions(-) diff --git a/BHPD.Keras3/01-DNN-Regression.ipynb b/BHPD.Keras3/01-DNN-Regression.ipynb index fdb202a..df15889 100644 --- a/BHPD.Keras3/01-DNN-Regression.ipynb +++ b/BHPD.Keras3/01-DNN-Regression.ipynb @@ -124,7 +124,7 @@ "## Step 2 - Retrieve data\n", "\n", "### 2.1 - Option 1 : From Keras\n", - "Boston housing is a famous historic dataset, so we can get it directly from [Keras datasets](https://www.tensorflow.org/api_docs/python/tf/keras/datasets) " + "Boston housing is a famous historic dataset, so we can get it directly from [Keras datasets](https://keras.io/api/datasets) " ] }, { @@ -231,10 +231,10 @@ "source": [ "## Step 4 - Build a model\n", "About informations about : \n", - " - [Optimizer](https://www.tensorflow.org/api_docs/python/tf/keras/optimizers)\n", - " - [Activation](https://www.tensorflow.org/api_docs/python/tf/keras/activations)\n", - " - [Loss](https://www.tensorflow.org/api_docs/python/tf/keras/losses)\n", - " - [Metrics](https://www.tensorflow.org/api_docs/python/tf/keras/metrics)" + " - [Optimizer](https://keras.io/api/optimizers)\n", + " - [Activation](https://keras.io/api/layers/activations)\n", + " - [Loss](https://keras.io/api/losses)\n", + " - [Metrics](https://keras.io/api/metrics)" ] }, { diff --git a/BHPD.Keras3/02-DNN-Regression-Premium.ipynb b/BHPD.Keras3/02-DNN-Regression-Premium.ipynb index 65cdd8b..626b5e8 100644 --- a/BHPD.Keras3/02-DNN-Regression-Premium.ipynb +++ b/BHPD.Keras3/02-DNN-Regression-Premium.ipynb @@ -123,7 +123,7 @@ "## Step 2 - Retrieve data\n", "\n", "### 2.1 - Option 1 : From Keras\n", - "Boston housing is a famous historic dataset, so we can get it directly from [Keras datasets](https://www.tensorflow.org/api_docs/python/tf/keras/datasets) " + "Boston housing is a famous historic dataset, so we can get it directly from [Keras datasets](https://keras.io/api/datasets) " ] }, { @@ -226,10 +226,10 @@ "source": [ "## Step 4 - Build a model\n", "More informations about : \n", - " - [Optimizer](https://www.tensorflow.org/api_docs/python/tf/keras/optimizers)\n", - " - [Activation](https://www.tensorflow.org/api_docs/python/tf/keras/activations)\n", - " - [Loss](https://www.tensorflow.org/api_docs/python/tf/keras/losses)\n", - " - [Metrics](https://www.tensorflow.org/api_docs/python/tf/keras/metrics)" + " - [Optimizer](https://keras.io/api/optimizers)\n", + " - [Activation](https://keras.io/api/layers/activations)\n", + " - [Loss](https://keras.io/api/losses)\n", + " - [Metrics](https://keras.io/api/metrics)" ] }, { diff --git a/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb b/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb index e75b7ad..b2dcb48 100644 --- 
a/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb +++ b/BHPD.PyTorch/01-DNN-Regression_PyTorch.ipynb @@ -208,7 +208,7 @@ "source": [ "## Step 5 - Train the model\n", "\n", - "#### 5.1 - stochastic gradient descent strategy to fit the model" + "### 5.1 - Stochastic gradient descent strategy to fit the model" ] }, { @@ -262,7 +262,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "##### 5.2 - get the model" + "#### 5.2 - Get the model" ] }, { @@ -283,7 +283,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "##### 5.3 - train the model" + "#### 5.3 - Train the model" ] }, { diff --git a/Embedding.Keras3/01-One-hot-encoding.ipynb b/Embedding.Keras3/01-One-hot-encoding.ipynb index 7c7451c..a66ba60 100644 --- a/Embedding.Keras3/01-One-hot-encoding.ipynb +++ b/Embedding.Keras3/01-One-hot-encoding.ipynb @@ -16,7 +16,7 @@ "\n", "Original dataset can be find **[there](http://ai.stanford.edu/~amaas/data/sentiment/)** \n", "Note that [IMDb.com](https://imdb.com) offers several easy-to-use [datasets](https://www.imdb.com/interfaces/) \n", - "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://www.tensorflow.org/api_docs/python/tf/keras/datasets)\n", + "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://keras.io/datasets)\n", "\n", "## What we're going to do :\n", "\n", @@ -177,7 +177,7 @@ "source": [ "## Step 3 - Retrieve data\n", "\n", - "IMDb dataset can bet get directly from Keras - see [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/datasets) \n", + "IMDb dataset can be retrieved directly from Keras - see [documentation](https://keras.io/api/datasets/imdb/) \n", "Note : Due to their nature, textual data can be somewhat complex.\n", "\n", "### 3.1 - Data structure : \n", @@ -211,8 +211,8 @@ "metadata": {}, "source": [ "### 3.2 - Load dataset\n", - "For simplicity, we will use a pre-formatted dataset - See [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/datasets/imdb/load_data) \n", - "However, Keras offers some usefull tools for formatting textual data - See [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/preprocessing/text) \n", + "For simplicity, we will use a pre-formatted dataset - See [documentation](https://keras.io/api/datasets/imdb) \n", + "However, Keras offers some useful tools for formatting textual data - See [documentation](https://keras.io/api/layers/preprocessing_layers/text/text_vectorization/) \n", "\n", "By default : \n", " - Start of a sequence will be marked with : 1\n", diff --git a/Embedding.Keras3/02-Keras-embedding.ipynb b/Embedding.Keras3/02-Keras-embedding.ipynb index 21f5fa0..face9b5 100644 --- a/Embedding.Keras3/02-Keras-embedding.ipynb +++ b/Embedding.Keras3/02-Keras-embedding.ipynb @@ -16,7 +16,7 @@ "\n", "Original dataset can be find **[there](http://ai.stanford.edu/~amaas/data/sentiment/)** \n", "Note that [IMDb.com](https://imdb.com) offers several easy-to-use [datasets](https://www.imdb.com/interfaces/) \n", - "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://www.tensorflow.org/api_docs/python/tf/keras/datasets)\n", + "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://keras.io/datasets)\n", "\n", "## What we're going to do :\n", "\n", @@ -114,7 +114,7 @@ "source": [ "## Step 2 - Retrieve data\n", "\n", - "IMDb dataset can bet get directly from Keras - see [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/datasets) \n", + "IMDb dataset can be retrieved 
directly from Keras - see [documentation](https://keras.io/api/datasets) \n", "Note : Due to their nature, textual data can be somewhat complex.\n", "\n", "For more details about the management of this dataset, see notebook [IMDB1](01-One-hot-encoding.ipynb)" @@ -249,8 +249,8 @@ "## Step 4 - Build the model\n", "\n", "More documentation about this model functions :\n", - " - [Embedding](https://www.tensorflow.org/api_docs/python/tf/keras/layers/Embedding)\n", - " - [GlobalAveragePooling1D](https://www.tensorflow.org/api_docs/python/tf/keras/layers/GlobalAveragePooling1D)" + " - [Embedding](https://keras.io/api/layers/core_layers/embedding/)\n", + " - [GlobalAveragePooling1D](https://keras.io/api/layers/pooling_layers/global_average_pooling1d/)" ] }, { diff --git a/Embedding.Keras3/03-Prediction.ipynb b/Embedding.Keras3/03-Prediction.ipynb index ed16420..e6d478c 100644 --- a/Embedding.Keras3/03-Prediction.ipynb +++ b/Embedding.Keras3/03-Prediction.ipynb @@ -114,7 +114,7 @@ "metadata": {}, "source": [ "### 2.2 - Retrieve dictionaries\n", - "Note : This dictionary is generated by [01-Embedding-Keras](01-Embedding-Keras.ipynb) notebook." + "Note : This dictionary is generated by [02-Keras-embedding](02-Keras-embedding.ipynb) notebook." ] }, { diff --git a/Embedding.Keras3/05-LSTM-Keras.ipynb b/Embedding.Keras3/05-LSTM-Keras.ipynb index 620c3c8..b5aa4bf 100644 --- a/Embedding.Keras3/05-LSTM-Keras.ipynb +++ b/Embedding.Keras3/05-LSTM-Keras.ipynb @@ -16,7 +16,7 @@ "\n", "Original dataset can be find **[there](http://ai.stanford.edu/~amaas/data/sentiment/)** \n", "Note that [IMDb.com](https://imdb.com) offers several easy-to-use [datasets](https://www.imdb.com/interfaces/) \n", - "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://www.tensorflow.org/api_docs/python/tf/keras/datasets)\n", + "For simplicity's sake, we'll use the dataset directly [embedded in Keras](https://keras.io/datasets)\n", "\n", "## What we're going to do :\n", "\n", @@ -111,7 +111,7 @@ "source": [ "## Step 3 - Retrieve data\n", "\n", - "IMDb dataset can bet get directly from Keras - see [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/datasets) \n", + "IMDb dataset can be retrieved directly from Keras - see [documentation](https://keras.io/api/datasets) \n", "Note : Due to their nature, textual data can be somewhat complex."
] }, @@ -120,8 +120,8 @@ "metadata": {}, "source": [ "### 3.1 - Get dataset\n", - "For simplicity, we will use a pre-formatted dataset - See [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/datasets/imdb/load_data) \n", - "However, Keras offers some usefull tools for formatting textual data - See [documentation](https://www.tensorflow.org/api_docs/python/tf/keras/preprocessing/text) \n", + "For simplicity, we will use a pre-formatted dataset - See [documentation](https://keras.io/api/datasets/imdb/) \n", + "However, Keras offers some useful tools for formatting textual data - See [documentation](https://keras.io/api/layers/preprocessing_layers/text/text_vectorization/) \n", "\n", "**Load dataset :**" ] }, @@ -252,8 +252,8 @@ "## Step 5 - Build the model\n", "\n", "More documentation about this model functions :\n", - " - [Embedding](https://www.tensorflow.org/api_docs/python/tf/keras/layers/Embedding)\n", - " - [GlobalAveragePooling1D](https://www.tensorflow.org/api_docs/python/tf/keras/layers/GlobalAveragePooling1D)" + " - [Embedding](https://keras.io/api/layers/core_layers/embedding/)\n", + " - [GlobalAveragePooling1D](https://keras.io/api/layers/pooling_layers/global_average_pooling1d)" ] }, { diff --git a/MNIST.Keras3/01-DNN-MNIST.ipynb b/MNIST.Keras3/01-DNN-MNIST.ipynb index 81cd2d7..44f9649 100644 --- a/MNIST.Keras3/01-DNN-MNIST.ipynb +++ b/MNIST.Keras3/01-DNN-MNIST.ipynb @@ -96,7 +96,7 @@ "source": [ "## Step 2 - Retrieve data\n", "MNIST is one of the most famous historic dataset. \n", - "Include in [Keras datasets](https://www.tensorflow.org/api_docs/python/tf/keras/datasets)" + "Included in [Keras datasets](https://keras.io/datasets)" ] }, { @@ -158,10 +158,10 @@ "source": [ "## Step 4 - Create model\n", "About informations about : \n", - " - [Optimizer](https://www.tensorflow.org/api_docs/python/tf/keras/optimizers)\n", - " - [Activation](https://www.tensorflow.org/api_docs/python/tf/keras/activations)\n", - " - [Loss](https://www.tensorflow.org/api_docs/python/tf/keras/losses)\n", - " - [Metrics](https://www.tensorflow.org/api_docs/python/tf/keras/metrics)" + " - [Optimizer](https://keras.io/api/optimizers)\n", + " - [Activation](https://keras.io/api/layers/activations)\n", + " - [Loss](https://keras.io/api/losses)\n", + " - [Metrics](https://keras.io/api/metrics)" ] }, { diff --git a/MNIST.Keras3/02-CNN-MNIST.ipynb b/MNIST.Keras3/02-CNN-MNIST.ipynb index 19c6b22..014e46f 100644 --- a/MNIST.Keras3/02-CNN-MNIST.ipynb +++ b/MNIST.Keras3/02-CNN-MNIST.ipynb @@ -96,7 +96,7 @@ "source": [ "## Step 2 - Retrieve data\n", "MNIST is one of the most famous historic dataset. 
\n", - "Include in [Keras datasets](https://www.tensorflow.org/api_docs/python/tf/keras/datasets)" + "Include in [Keras datasets](https://keras.io/datasets)" ] }, { @@ -161,10 +161,10 @@ "source": [ "## Step 4 - Create model\n", "About informations about : \n", - " - [Optimizer](https://www.tensorflow.org/api_docs/python/tf/keras/optimizers)\n", - " - [Activation](https://www.tensorflow.org/api_docs/python/tf/keras/activations)\n", - " - [Loss](https://www.tensorflow.org/api_docs/python/tf/keras/losses)\n", - " - [Metrics](https://www.tensorflow.org/api_docs/python/tf/keras/metrics)" + " - [Optimizer](https://keras.io/api/optimizers)\n", + " - [Activation](https://keras.io/api/layers/activations)\n", + " - [Loss](https://keras.io/api/losses)\n", + " - [Metrics](https://keras.io/api/metrics)" ] }, { diff --git a/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb b/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb index 94e1c34..674eee5 100644 --- a/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb +++ b/MNIST.PyTorch/01-DNN-MNIST_PyTorch.ipynb @@ -66,7 +66,7 @@ "source": [ "## Step 2 - Retrieve data\n", "MNIST is one of the most famous historic dataset. \n", - "Include in [torchvision datasets](https://pytorch.org/docs/stable/torchvision/datasets.html)" + "Include in [torchvision datasets](https://pytorch.org/vision/stable/datasets.html)" ] }, { @@ -189,9 +189,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Step 5 - Train the model\n", + "## Step 5 - Train the model\n", "\n", - "#### 5.1 - stochastic gradient descent strategy to fit the model\n" + "### 5.1 - Stochastic gradient descent strategy to fit the model\n" ] }, { @@ -246,7 +246,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "##### 5.2 - fit the model" + "### 5.2 - Fit the model" ] }, { diff --git a/Perceptron/01-Simple-Perceptron.ipynb b/Perceptron/01-Simple-Perceptron.ipynb index aab34c2..ff37a5a 100644 --- a/Perceptron/01-Simple-Perceptron.ipynb +++ b/Perceptron/01-Simple-Perceptron.ipynb @@ -52,7 +52,7 @@ "## Step 2 - Prepare IRIS Dataset\n", "\n", "Retrieve a dataset : http://scikit-learn.org/stable/modules/classes.html#module-sklearn.datasets \n", - "About the datesets : http://scikit-learn.org/stable/datasets/index.html \n", + "About the datesets : https://scikit-learn.org/stable/datasets.html#datasets \n", "\n", "Data fields (X) :\n", "- 0 : sepal length in cm\n", diff --git a/Wine.Keras3/01-DNN-Wine-Regression.ipynb b/Wine.Keras3/01-DNN-Wine-Regression.ipynb index f3abe84..dd9d1c5 100644 --- a/Wine.Keras3/01-DNN-Wine-Regression.ipynb +++ b/Wine.Keras3/01-DNN-Wine-Regression.ipynb @@ -18,7 +18,7 @@ "This dataset is provide by : \n", "Paulo Cortez, University of Minho, Guimarães, Portugal, http://www3.dsi.uminho.pt/pcortez \n", "A. Cerdeira, F. Almeida, T. Matos and J. 
Reis, Viticulture Commission of the Vinho Verde Region(CVRVV), Porto, Portugal, @2009 \n", - "This dataset can be retreive at [University of California Irvine (UCI)](https://archive-beta.ics.uci.edu/ml/datasets/wine+quality)\n", + "This dataset can be retreive at [University of California Irvine (UCI)](https://archive.ics.uci.edu/dataset/186/wine+quality)\n", "\n", "\n", "Due to privacy and logistic issues, only physicochemical and sensory variables are available \n", @@ -202,10 +202,10 @@ "source": [ "## Step 4 - Build a model\n", "More informations about : \n", - " - [Optimizer](https://www.tensorflow.org/api_docs/python/tf/keras/optimizers)\n", - " - [Activation](https://www.tensorflow.org/api_docs/python/tf/keras/activations)\n", - " - [Loss](https://www.tensorflow.org/api_docs/python/tf/keras/losses)\n", - " - [Metrics](https://www.tensorflow.org/api_docs/python/tf/keras/metrics)" + " - [Optimizer](https://keras.io/api/optimizers)\n", + " - [Activation](https://keras.io/api/layers/activations)\n", + " - [Loss](https://keras.io/api/losses)\n", + " - [Metrics](https://keras.io/api/metrics)" ] }, { -- GitLab