"pytorch/vscode:/vscode.git/clone" did not exist on "49732231d7fcc088019afe015ec78476babab381"
Unverified Commit 564f8d4d authored by Mark Daoust, committed by GitHub

Merge pull request #4608 from yashk2810/activation_change

Use TensorFlow functions instead of Keras strings
parents a81e1e7c 18de5380
@@ -292,9 +292,9 @@
 "cell_type": "code",
 "source": [
 "baseline_model = keras.Sequential([\n",
-" keras.layers.Dense(16, activation='relu', input_shape=(10000,)),\n",
-" keras.layers.Dense(16, activation='relu'),\n",
-" keras.layers.Dense(1, activation='sigmoid')\n",
+" keras.layers.Dense(16, activation=tf.nn.relu, input_shape=(10000,)),\n",
+" keras.layers.Dense(16, activation=tf.nn.relu),\n",
+" keras.layers.Dense(1, activation=tf.nn.sigmoid)\n",
 "])\n",
 "\n",
 "baseline_model.compile(optimizer='adam',\n",
@@ -363,9 +363,9 @@
 "cell_type": "code",
 "source": [
 "smaller_model = keras.Sequential([\n",
-" keras.layers.Dense(4, activation='relu', input_shape=(10000,)),\n",
-" keras.layers.Dense(4, activation='relu'),\n",
-" keras.layers.Dense(1, activation='sigmoid')\n",
+" keras.layers.Dense(4, activation=tf.nn.relu, input_shape=(10000,)),\n",
+" keras.layers.Dense(4, activation=tf.nn.relu),\n",
+" keras.layers.Dense(1, activation=tf.nn.sigmoid)\n",
 "])\n",
 "\n",
 "smaller_model.compile(optimizer='adam',\n",
@@ -436,9 +436,9 @@
 "cell_type": "code",
 "source": [
 "bigger_model = keras.models.Sequential([\n",
-" keras.layers.Dense(512, activation='relu', input_shape=(10000,)),\n",
-" keras.layers.Dense(512, activation='relu'),\n",
-" keras.layers.Dense(1, activation='sigmoid')\n",
+" keras.layers.Dense(512, activation=tf.nn.relu, input_shape=(10000,)),\n",
+" keras.layers.Dense(512, activation=tf.nn.relu),\n",
+" keras.layers.Dense(1, activation=tf.nn.sigmoid)\n",
 "])\n",
 "\n",
 "bigger_model.compile(optimizer='adam',\n",
@@ -604,10 +604,10 @@
 "source": [
 "l2_model = keras.models.Sequential([\n",
 " keras.layers.Dense(16, kernel_regularizer=keras.regularizers.l2(0.001),\n",
-" activation='relu', input_shape=(10000,)),\n",
+" activation=tf.nn.relu, input_shape=(10000,)),\n",
 " keras.layers.Dense(16, kernel_regularizer=keras.regularizers.l2(0.001),\n",
-" activation='relu'),\n",
-" keras.layers.Dense(1, activation='sigmoid')\n",
+" activation=tf.nn.relu),\n",
+" keras.layers.Dense(1, activation=tf.nn.sigmoid)\n",
 "])\n",
 "\n",
 "l2_model.compile(optimizer='adam',\n",
@@ -695,11 +695,11 @@
 "cell_type": "code",
 "source": [
 "dpt_model = keras.models.Sequential([\n",
-" keras.layers.Dense(16, activation='relu', input_shape=(10000,)),\n",
+" keras.layers.Dense(16, activation=tf.nn.relu, input_shape=(10000,)),\n",
 " keras.layers.Dropout(0.5),\n",
-" keras.layers.Dense(16, activation='relu'),\n",
+" keras.layers.Dense(16, activation=tf.nn.relu),\n",
 " keras.layers.Dropout(0.5),\n",
-" keras.layers.Dense(1, activation='sigmoid')\n",
+" keras.layers.Dense(1, activation=tf.nn.sigmoid)\n",
 "])\n",
 "\n",
 "dpt_model.compile(optimizer='adam',\n",
...
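For context, a minimal sketch (not part of this commit) of the two equivalent ways tf.keras accepts an activation: as a string name or as a callable such as tf.nn.relu. The notebooks touched by this PR switch from the string form to the callable form; the resulting models behave the same.

```python
import tensorflow as tf
from tensorflow import keras

# String form used before this change.
string_model = keras.Sequential([
    keras.layers.Dense(16, activation='relu', input_shape=(10000,)),
    keras.layers.Dense(1, activation='sigmoid')
])

# Callable form used after this change; tf.nn.relu / tf.nn.sigmoid
# are the same functions the string names resolve to.
function_model = keras.Sequential([
    keras.layers.Dense(16, activation=tf.nn.relu, input_shape=(10000,)),
    keras.layers.Dense(1, activation=tf.nn.sigmoid)
])

# Both compile and train identically.
string_model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
function_model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
```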