
Commit 9336bdc

Allow to submit metadata in docker-based submissions (#379)
* Add docker based submission fix
* Fix mistakes
* Json loads fix
* Input fix
* Update test_submissions.py
* update changes

Co-authored-by: Gunjan Chhablani <[email protected]>
1 parent 7e897ce commit 9336bdc
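In short, the docker-based `evalai push` flow can now prompt for submission details and the per-phase metadata attributes that challenge hosts configure. A minimal sketch of the dictionary those prompts build up — the key names follow the diff below, while the example values and attribute entries are illustrative assumptions, not taken from the commit:

```python
# Illustrative shape only; values and attribute names are assumptions.
submission_metadata = {
    "method_name": "MyMethod",
    "method_description": "Short description of the approach",
    "project_url": "https://example.com/project",
    "publication_url": "https://example.com/paper",
    # Populated from the challenge phase's submission_meta_attributes
    "submission_meta_attributes": [
        {"name": "TextAttribute", "type": "text", "description": "Sample",
         "required": "True", "value": "free-form answer"},
        {"name": "MultipleChoiceAttribute", "type": "checkbox",
         "description": "Sample", "required": "True",
         "options": ["alpha", "beta", "gamma"], "values": ["alpha"]},
    ],
}
```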

File tree

5 files changed: 228 additions & 17 deletions

evalai/submissions.py
evalai/utils/challenges.py
evalai/utils/common.py
tests/data/challenge_response.py
tests/test_submissions.py

evalai/submissions.py

Lines changed: 142 additions & 0 deletions

@@ -20,6 +20,7 @@
     display_submission_details,
     display_submission_result,
     convert_bytes_to,
+    get_submission_meta_attributes,
 )
 from evalai.utils.urls import URLS
 from evalai.utils.config import (

@@ -126,6 +127,147 @@ def push(image, phase, url, public, private):
     max_docker_image_size = response.get("max_docker_image_size")

     docker_image_size = docker_image.__dict__.get("attrs").get("VirtualSize")
+    # Prompt for submission details
+    if click.confirm("Do you want to include the Submission Details?"):
+        submission_metadata["method_name"] = click.prompt(
+            style("Method Name", fg="yellow"), type=str, default=""
+        )
+        submission_metadata["method_description"] = click.prompt(
+            style("Method Description", fg="yellow"),
+            type=str,
+            default="",
+        )
+        submission_metadata["project_url"] = click.prompt(
+            style("Project URL", fg="yellow"), type=str, default=""
+        )
+        submission_metadata["publication_url"] = click.prompt(
+            style("Publication URL", fg="yellow"), type=str, default=""
+        )
+
+    submission_meta_attributes = get_submission_meta_attributes(
+        challenge_pk, phase_pk
+    )
+
+    submission_attribute_metadata = []
+
+    if (submission_meta_attributes and len(submission_meta_attributes) > 0):
+        if click.confirm(
+            "Do you want to include the Submission Metadata?"
+        ):
+            for attribute in submission_meta_attributes:
+                attribute_type = attribute["type"]
+                attribute_name = attribute["name"]
+                attribute_description = attribute["description"]
+                attribute_required = attribute.get("required")
+                attribute_data = {
+                    'name': attribute_name,
+                    'type': attribute_type,
+                    'description': attribute_description,
+                    'required': attribute_required,
+                }
+                if attribute_required:
+                    attribute_name = attribute_name + '*'
+                value = None
+                message = "{} ({})".format(
+                    attribute_name, attribute_description
+                )
+                if attribute_type == "text":
+                    while True:
+                        value = click.prompt(
+                            style(message, fg="yellow"),
+                            type=str,
+                            default="",
+                        )
+                        if not attribute_required or value != "":
+                            break
+                        echo(
+                            "Error: {} is a required field".format(
+                                attribute["name"]
+                            )
+                        )
+                    attribute_data['value'] = value
+                if attribute_type == "boolean":
+                    while True:
+                        value = click.prompt(
+                            style(message, fg="yellow"), type=bool, default=""
+                        )
+                        if not attribute_required or value != "":
+                            break
+                        echo(
+                            "Error: {} is a required field".format(
+                                attribute["name"]
+                            )
+                        )
+                    attribute_data['value'] = value
+                if attribute_type == "radio":
+                    while True:
+                        value = click.prompt(
+                            style(
+                                "{}:\nChoices:{}".format(
+                                    message, attribute["options"]
+                                ),
+                                fg="yellow",
+                            ),
+                            type=click.Choice(attribute["options"]),
+                            default=""
+                        )
+                        if not attribute_required or value != "":
+                            break
+                        echo(
+                            "Error: {} is a required field".format(
+                                attribute["name"]
+                            )
+                        )
+                    attribute_data['options'] = attribute['options']
+                    attribute_data['value'] = value
+                if attribute_type == "checkbox":
+                    option_chosen = True
+                    while option_chosen:
+                        value = []
+                        choices = click.prompt(
+                            style(
+                                "{}:\nChoices(separated by comma):{}".format(
+                                    message, attribute["options"]
+                                ),
+                                fg="yellow",
+                            ),
+                            type=str,
+                            show_default=False,
+                            default=""
+                        )
+                        if choices != "":
+                            choices = [
+                                choice.strip(" ")
+                                for choice in choices.split(",")
+                            ]
+                        else:
+                            choices = []
+                            option_chosen = False
+                        if attribute_required and len(choices) == 0:
+                            echo(
+                                "Error: {} is a required field. Please select atleast one option".format(
+                                    attribute["name"]
+                                )
+                            )
+                            option_chosen = True
+                        for choice in choices:
+                            if choice in attribute["options"]:
+                                value.append(choice)
+                                option_chosen = False
+                            else:
+                                echo(
+                                    "Error: Choose correct value(s) from the given options only"
+                                )
+                                option_chosen = True
+                                break
+                    attribute_data['options'] = attribute['options']
+                    attribute_data['values'] = value
+                submission_attribute_metadata.append(attribute_data)
+
+    # After collecting submission_attribute_metadata
+    if submission_attribute_metadata:
+        submission_metadata["submission_meta_attributes"] = submission_attribute_metadata
+
     if docker_image_size > max_docker_image_size:
         max_docker_image_size = convert_bytes_to(max_docker_image_size, "gb")
         message = "\nError: Image is too large. The maximum image size allowed is {} GB".format(
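The import hunk above pulls in `get_submission_meta_attributes`, but that helper is defined outside this diff. A minimal sketch of what it presumably does, using a plain `requests` call against the challenge-phase detail endpoint in place of the CLI's own request and auth utilities — the URL, host, and token handling here are stand-in assumptions:

```python
# Sketch under assumptions: endpoint path, host, and auth handling are
# simplified stand-ins for the CLI's real request helpers.
import requests


def get_submission_meta_attributes(challenge_pk, phase_pk,
                                    host="https://eval.ai", token=""):
    """Fetch the challenge phase detail and return its submission_meta_attributes."""
    url = "{}/api/challenges/challenge/{}/challenge_phase/{}".format(
        host, challenge_pk, phase_pk
    )
    response = requests.get(
        url, headers={"Authorization": "Token {}".format(token)}
    )
    response.raise_for_status()
    return response.json().get("submission_meta_attributes")
```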

evalai/utils/challenges.py

Lines changed: 1 addition & 1 deletion

@@ -442,7 +442,7 @@ def pretty_print_challenge_phase_data(phase):

     title = "{} {} {}".format(phase_title, challenge_id, phase_id)

-    cleaned_desc = BeautifulSoup(phase["description"], "lxml").text
+    cleaned_desc = BeautifulSoup(phase["description"], "html.parser").text
     description = "{}\n".format(cleaned_desc)

     start_date = "Start Date : {}".format(

evalai/utils/common.py

Lines changed: 1 addition & 1 deletion

@@ -135,7 +135,7 @@ def clean_data(data):
     """
     Strip HTML and clean spaces
     """
-    data = BeautifulSoup(data, "lxml").text.strip()
+    data = BeautifulSoup(data, "html.parser").text.strip()
     data = " ".join(data.split()).encode("utf-8")
     return data
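The two one-line hunks above swap BeautifulSoup's parser from "lxml" to "html.parser". The HTML-to-text stripping behaves the same for this use, but "html.parser" ships with the Python standard library, so the CLI no longer depends on the third-party lxml package for this path. A minimal sketch of the call as it now stands:

```python
from bs4 import BeautifulSoup

# "html.parser" is Python's built-in parser; no lxml install required.
description = "<p>Detect <b>objects</b> in images.</p>"
cleaned = BeautifulSoup(description, "html.parser").text.strip()
print(cleaned)  # -> "Detect objects in images."
```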

tests/data/challenge_response.py

Lines changed: 37 additions & 1 deletion

@@ -314,7 +314,43 @@
     "is_public": true,
     "is_active": true,
     "codename": "test2019",
-    "slug": "philip-phase-2019"
+    "slug": "philip-phase-2019",
+    "submission_meta_attributes": [
+        {
+            "name": "TextAttribute",
+            "type": "text",
+            "required": "True",
+            "description": "Sample"
+        },
+        {
+            "name": "SingleOptionAttribute",
+            "type": "radio",
+            "options": [
+                "A",
+                "B",
+                "C"
+            ],
+            "required": "True",
+            "description": "Sample"
+        },
+        {
+            "name": "MultipleChoiceAttribute",
+            "type": "checkbox",
+            "options": [
+                "alpha",
+                "beta",
+                "gamma"
+            ],
+            "required": "True",
+            "description": "Sample"
+        },
+        {
+            "name": "TrueFalseField",
+            "type": "boolean",
+            "required": "True",
+            "description": "Sample"
+        }
+    ]
 }
 """
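The new tests below load this fixture with `json.loads` and hand its `submission_meta_attributes` list to the prompting code. A small sketch of that parsing step, assuming the fixture module is importable as `tests.data.challenge_response` (the import path is an assumption; the attribute names come from the fixture above):

```python
import json

# Assumed import path for the fixture module shown above.
from tests.data import challenge_response

phase = json.loads(challenge_response.challenge_phase_details_slug)
for attribute in phase["submission_meta_attributes"]:
    # e.g. TextAttribute text [], SingleOptionAttribute radio ['A', 'B', 'C'], ...
    print(attribute["name"], attribute["type"], attribute.get("options", []))
```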

tests/test_submissions.py

Lines changed: 47 additions & 14 deletions

@@ -8,6 +8,7 @@
 from click.testing import CliRunner
 from datetime import datetime
 from dateutil import tz
+from unittest.mock import patch

 from evalai.challenges import challenge
 from evalai.submissions import submission, push

@@ -371,25 +372,57 @@ def test_make_submission_for_docker_based_challenge_teardown():
         return (registry_port, image_tag)

     @responses.activate
-    def test_make_submission_for_docker_based_challenge(
+    def test_make_submission_for_docker_based_challenge_without_submission_metadata(
         self, test_make_submission_for_docker_based_challenge_setup
     ):
         registry_port, image_tag = (
             test_make_submission_for_docker_based_challenge_setup
         )
-        runner = CliRunner()
-        with runner.isolated_filesystem():
-            result = runner.invoke(
-                push,
-                [
-                    image_tag,
-                    "-p",
-                    "philip-phase-2019",
-                    "-u",
-                    "localhost:{0}".format(registry_port),
-                ],
-            )
-        assert result.exit_code == 0
+        with patch(
+            'evalai.submissions.get_submission_meta_attributes',
+            return_value=json.loads(challenge_response.challenge_phase_details_slug)["submission_meta_attributes"]
+        ):
+            runner = CliRunner()
+            with runner.isolated_filesystem():
+                result = runner.invoke(
+                    push,
+                    [
+                        image_tag,
+                        "-p",
+                        "philip-phase-2019",
+                        "-u",
+                        "localhost:{0}".format(registry_port),
+                    ],
+                    input="N\nN\n",
+                )
+            assert result.exit_code == 0
+
+    @responses.activate
+    def test_make_submission_for_docker_based_challenge_with_submission_metadata(
+        self, test_make_submission_for_docker_based_challenge_setup
+    ):
+        registry_port, image_tag = (
+            test_make_submission_for_docker_based_challenge_setup
+        )
+        with patch(
+            'evalai.submissions.get_submission_meta_attributes',
+            return_value=json.loads(challenge_response.challenge_phase_details_slug)["submission_meta_attributes"]
+        ):
+            runner = CliRunner()
+            with runner.isolated_filesystem():
+                result = runner.invoke(
+                    push,
+                    [
+                        image_tag,
+                        "-p",
+                        "philip-phase-2019",
+                        "-u",
+                        "localhost:{0}".format(registry_port),
+                        "--public"
+                    ],
+                    input="\nY\nTest\nTest\nTest\nTest\nY\nTest\nA\nalpha\nTrue\n",
+                )
+            assert result.exit_code == 0

     @responses.activate
     def test_make_submission_using_presigned_url(self, request):
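Both tests drive the interactive prompts through CliRunner's `input=` argument: each newline-separated token answers the next `click.confirm` or `click.prompt` in order, so `"N\nN\n"` declines both questions while the longer string walks through every metadata prompt. A self-contained sketch of that mechanism, using a toy command rather than evalai's `push`:

```python
import click
from click.testing import CliRunner


@click.command()
def toy():
    # Two prompts, mirroring the "Submission Details" / "Submission Metadata" questions.
    if click.confirm("Do you want to include the Submission Details?"):
        name = click.prompt("Method Name", type=str, default="")
        click.echo("method_name={}".format(name))
    click.confirm("Do you want to include the Submission Metadata?")


runner = CliRunner()
# "Y" answers the first confirm, "Test" fills the prompt, "N" answers the second confirm.
result = runner.invoke(toy, input="Y\nTest\nN\n")
assert result.exit_code == 0
print(result.output)
```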
