
Commit a558896

Created with Colaboratory
1 parent 6d42cac commit a558896

File tree

1 file changed: +104 −0 lines changed


P100.ipynb

@@ -67,6 +67,110 @@
 "name": "stdout"
 }
 ]
+},
+{
+"cell_type": "code",
+"metadata": {
+"id": "CPUS4nwiAlfd"
+},
+"source": [
+"import keras\n",
+"import numpy as np\n",
+"import tensorflow as tf\n",
+"\n",
+"from keras.layers import Input, Dense, Lambda\n",
+"from keras.models import Model\n",
+"from keras import backend as K\n",
+"\n",
+"from tensorflow.keras.applications.resnet50 import ResNet50\n",
+"from tensorflow.keras.preprocessing import image\n",
+"from tensorflow.keras.applications.resnet50 import preprocess_input, decode_predictions"
+],
+"execution_count": 1,
+"outputs": []
+},
+{
+"cell_type": "code",
+"metadata": {
+"id": "nvu-hhKlCcsg"
+},
+"source": [
+"def triplet_loss(inputs, dist='sqeuclidean', margin='maxplus'):\n",
+"    anchor, positive, negative = inputs\n",
+"    positive_distance = K.square(anchor - positive)\n",
+"    negative_distance = K.square(anchor - negative)\n",
+"    if dist == 'euclidean':\n",
+"        positive_distance = K.sqrt(K.sum(positive_distance, axis=-1, keepdims=True))\n",
+"        negative_distance = K.sqrt(K.sum(negative_distance, axis=-1, keepdims=True))\n",
+"    elif dist == 'sqeuclidean':\n",
+"        positive_distance = K.sum(positive_distance, axis=-1, keepdims=True)\n",
+"        negative_distance = K.sum(negative_distance, axis=-1, keepdims=True)\n",
+"    loss = positive_distance - negative_distance\n",
+"    if margin == 'maxplus':\n",
+"        loss = K.maximum(0.0, 1 + loss)\n",
+"    elif margin == 'softplus':\n",
+"        loss = K.log(1 + K.exp(loss))\n",
+"    return K.mean(loss)\n",
+"\n",
+"def triplet_loss_np(inputs, dist='sqeuclidean', margin='maxplus'):\n",
+"    anchor, positive, negative = inputs\n",
+"    positive_distance = np.square(anchor - positive)\n",
+"    negative_distance = np.square(anchor - negative)\n",
+"    if dist == 'euclidean':\n",
+"        positive_distance = np.sqrt(np.sum(positive_distance, axis=-1, keepdims=True))\n",
+"        negative_distance = np.sqrt(np.sum(negative_distance, axis=-1, keepdims=True))\n",
+"    elif dist == 'sqeuclidean':\n",
+"        positive_distance = np.sum(positive_distance, axis=-1, keepdims=True)\n",
+"        negative_distance = np.sum(negative_distance, axis=-1, keepdims=True)\n",
+"    loss = positive_distance - negative_distance\n",
+"    if margin == 'maxplus':\n",
+"        loss = np.maximum(0.0, 1 + loss)\n",
+"    elif margin == 'softplus':\n",
+"        loss = np.log(1 + np.exp(loss))\n",
+"    return np.mean(loss)\n",
+"\n",
+"def check_loss():\n",
+"    batch_size = 10\n",
+"    shape = (batch_size, 4096)\n",
+"\n",
+"    p1 = normalize(np.random.random(shape))\n",
+"    n = normalize(np.random.random(shape))\n",
+"    p2 = normalize(np.random.random(shape))\n",
+"\n",
+"    input_tensor = [K.variable(p1), K.variable(n), K.variable(p2)]\n",
+"    out1 = K.eval(triplet_loss(input_tensor))\n",
+"    input_np = [p1, n, p2]\n",
+"    out2 = triplet_loss_np(input_np)\n",
+"\n",
+"    assert out1.shape == out2.shape\n",
+"    print(np.linalg.norm(out1))\n",
+"    print(np.linalg.norm(out2))\n",
+"    print(np.linalg.norm(out1-out2))"
+],
+"execution_count": 2,
+"outputs": []
+},
+{
+"cell_type": "code",
+"metadata": {
+"id": "JM-tEj92Chs_"
+},
+"source": [
+"tf.random.set_seed(1234)"
+],
+"execution_count": 3,
+"outputs": []
+},
+{
+"cell_type": "code",
+"metadata": {
+"id": "9nqywM79C8kt"
+},
+"source": [
+""
+],
+"execution_count": null,
+"outputs": []
 }
 ]
}
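The added cells define matching Keras-backend and NumPy implementations of the triplet loss (`triplet_loss` / `triplet_loss_np`), plus a `check_loss` helper that compares the two on random, unit-normalized 4096-dimensional embeddings. For anchor a, positive p, and negative n, with d the distance selected by `dist` (squared Euclidean for 'sqeuclidean', Euclidean for 'euclidean') and the margin hard-coded to 1, the code computes the batch mean over B triplets:

% Loss computed by triplet_loss / triplet_loss_np as committed above.
\mathcal{L}_{\mathrm{maxplus}}  = \frac{1}{B} \sum_{i=1}^{B} \max\!\bigl(0,\; 1 + d(a_i, p_i) - d(a_i, n_i)\bigr)
\mathcal{L}_{\mathrm{softplus}} = \frac{1}{B} \sum_{i=1}^{B} \log\!\bigl(1 + \exp\bigl(d(a_i, p_i) - d(a_i, n_i)\bigr)\bigr)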

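One caveat: `check_loss` calls a `normalize` helper that is never defined or imported in the added cells, so calling it as-is would raise a NameError. Below is a minimal sketch of a follow-up cell, assuming `normalize` is meant to be sklearn's row-wise L2 normalization (an assumption, not something the commit states) and that the two cells above have already been executed:

# Sketch only, not part of this commit.
# Assumption: `normalize` is sklearn.preprocessing.normalize (row-wise L2 norm);
# the committed cells never define or import it.
from sklearn.preprocessing import normalize

check_loss()
# Prints the norms of the Keras-backend loss, the NumPy loss, and their
# difference; the last value should be ~0 if the two implementations agree.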