diff --git a/cms/grading/scoretypes/abc.py b/cms/grading/scoretypes/abc.py
index f36542606e..da5184d1f4 100644
--- a/cms/grading/scoretypes/abc.py
+++ b/cms/grading/scoretypes/abc.py
@@ -236,7 +236,7 @@ class ScoreTypeGroup(ScoreTypeAlone):
{% endif %}
- {% trans index=st["idx"] %}Subtask {{ index }}{% endtrans %}
+ {% trans name=st["name"] %}{{ name }}{% endtrans %}
{% if "score" in st and "max_score" in st %}
@@ -473,6 +473,10 @@ def compute_score(self, submission_result):
tc["show_in_oi_restricted_feedback"] = (
tc["idx"] == tc_first_lowest_idx)
+ if st_score_fraction < 1.0 and parameter[0] > 0: # display full feedback for sample cases
+ for t in testcases:
+ t["show_in_restricted_feedback"] = (t["idx"] <= restricted_feedback_idx)
+
score += st_score
subtasks.append({
"idx": st_idx,
@@ -483,6 +487,7 @@ def compute_score(self, submission_result):
# But we also want the properly rounded score for display.
"score": rounded_score,
"max_score": parameter[0],
+ "name": parameter[2],
"testcases": testcases})
if all(self.public_testcases[tc_idx] for tc_idx in target):
public_score += st_score
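
The two hunks above assume that each group in the GroupMin/GroupMul score-type parameters carries a display name as its third element: compute_score() reads it as parameter[2] and the feedback template shows it as st["name"]. A minimal sketch of what such parameters could look like (the names and testcase selectors here are invented for illustration):

    # Hypothetical score_type_parameters once every group has a display name.
    # Layout per group: [max_score, testcase selector, name].
    score_type_parameters = [
        [0, "sample.*", "Sample Cases"],   # zero-point sample group
        [30, "easy.*", "Subtask 1"],
        [70, "hard.*", "Subtask 2"],
    ]

    # compute_score() then carries the third element through to the template.
    for max_score, target, name in score_type_parameters:
        print(f"{name}: worth up to {max_score} points (cases matching {target!r})")
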
diff --git a/cms/server/admin/templates/macro/reevaluation_buttons.html b/cms/server/admin/templates/macro/reevaluation_buttons.html
index 9f7b299dd8..ea194a8f1e 100644
--- a/cms/server/admin/templates/macro/reevaluation_buttons.html
+++ b/cms/server/admin/templates/macro/reevaluation_buttons.html
@@ -43,7 +43,8 @@
{% endfor %}
'level': 'compilation'},
function(response) { utils.redirect_if_ok('{{ next_page }}', response); }
- );"
+ );
+ }"
{% if not allowed %}
disabled
{% endif %}
@@ -56,7 +57,8 @@
{% endfor %}
'level': 'evaluation'},
function(response) { utils.redirect_if_ok('{{ next_page }}', response); }
- );"
+ );
+ }"
{% if not allowed %}
disabled
{% endif %}
@@ -69,7 +71,8 @@
{% endfor %}
},
function(response) { utils.redirect_if_ok('{{ next_page }}', response); }
- );"
+ );
+ }"
{% if not allowed %}
disabled
{% endif %}
diff --git a/cms/server/contest/handlers/base.py b/cms/server/contest/handlers/base.py
index ebc29f8e6a..383812245d 100644
--- a/cms/server/contest/handlers/base.py
+++ b/cms/server/contest/handlers/base.py
@@ -31,6 +31,7 @@
import logging
import traceback
+from datetime import datetime, timedelta
import collections
@@ -202,7 +203,12 @@ def get(self):
# able to import new contests without having to restart CWS.
contest_list = dict()
for contest in self.sql_session.query(Contest).all():
- contest: Contest
- contest_list[contest.name] = contest
+ # Hide contests that ended more than a week ago or start more than a week from now (times are naive UTC)
+ now = datetime.utcnow()
+ seven_days_ago = now - timedelta(days=7)
+ seven_days_ahead = now + timedelta(days=7)
+ if contest.start < seven_days_ahead and contest.stop > seven_days_ago:
+ contest_list[contest.name] = contest
+
self.render("contest_list.html", contest_list=contest_list,
**self.r_params)
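
The loop above only lists contests whose window overlaps a two-week band around the present. A standalone sketch of the same predicate, assuming contest.start and contest.stop are naive UTC datetimes as elsewhere in CMS:

    from datetime import datetime, timedelta

    def contest_is_listed(start, stop, now=None, window=timedelta(days=7)):
        """Hide contests that ended more than `window` ago or that start
        more than `window` from now."""
        now = now or datetime.utcnow()
        return start < now + window and stop > now - window

    # A contest that finished ten days ago is no longer listed.
    print(contest_is_listed(datetime(2024, 1, 1), datetime(2024, 1, 2),
                            now=datetime(2024, 1, 12)))  # False
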
diff --git a/cms/server/contest/phase_management.py b/cms/server/contest/phase_management.py
index 3fb25c86c2..1b868cb3d3 100644
--- a/cms/server/contest/phase_management.py
+++ b/cms/server/contest/phase_management.py
@@ -110,17 +110,20 @@ def compute_actual_phase(
actual_start = None
actual_stop = None
- if contest_start <= timestamp <= contest_stop:
+ adjusted_start = contest_start + delay_time
+ adjusted_stop = contest_stop + delay_time + extra_time
+
+ if adjusted_start <= timestamp <= adjusted_stop:
actual_phase = -1
- current_phase_begin = contest_start
- current_phase_end = contest_stop
- elif timestamp < contest_start:
+ current_phase_begin = adjusted_start
+ current_phase_end = adjusted_stop
+ elif timestamp < adjusted_start:
actual_phase = -2
current_phase_begin = None
- current_phase_end = contest_start
- elif contest_stop < timestamp:
+ current_phase_end = adjusted_start
+ elif adjusted_stop < timestamp:
actual_phase = +2
- current_phase_begin = contest_stop
+ current_phase_begin = adjusted_stop
current_phase_end = None
else:
raise RuntimeError("Logic doesn't seem to be working...")
@@ -130,18 +133,26 @@ def compute_actual_phase(
# "Traditional" contest.
intended_start = contest_start
intended_stop = contest_stop
+
+ # delay_time shifts the whole window; extra_time extends only the end
+ actual_start = intended_start + delay_time
+ actual_stop = intended_stop + delay_time + extra_time
else:
# "USACO-like" contest, and we already know when the user
# started/will start.
# Both values are lower- and upper-bounded to prevent the
# ridiculous situations of starting_time being set by the
# admin way before contest_start or after contest_stop.
- intended_start = min(max(starting_time,
- contest_start), contest_stop)
- intended_stop = min(max(starting_time + per_user_time,
- contest_start), contest_stop)
- actual_start = intended_start + delay_time
- actual_stop = intended_stop + delay_time + extra_time
+
+ # delay_time shifts contest_start/contest_stop, but not starting_time (the moment the user pressed the start button)
+ intended_start = min(max(starting_time, contest_start + delay_time),
+ contest_stop + delay_time)
+ intended_stop = min(max(starting_time + per_user_time, contest_start + delay_time),
+ contest_stop + delay_time)
+
+ actual_start = intended_start
+ actual_stop = intended_stop + extra_time
+
assert contest_start <= actual_start <= actual_stop
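
A small worked example of the adjusted window for a "traditional" contest, matching the branch above: delay_time slides the whole window, extra_time only pushes the end back further (both assumed non-negative).

    from datetime import datetime, timedelta

    contest_start = datetime(2024, 7, 1, 10, 0)
    contest_stop = datetime(2024, 7, 1, 15, 0)
    delay_time = timedelta(minutes=30)   # whole window slides by 30 minutes
    extra_time = timedelta(minutes=10)   # only the end gains 10 more minutes

    actual_start = contest_start + delay_time              # 10:30
    actual_stop = contest_stop + delay_time + extra_time   # 15:40
    assert contest_start <= actual_start <= actual_stop
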
diff --git a/cms/server/contest/submission/workflow.py b/cms/server/contest/submission/workflow.py
index a421427eb8..0e377f9bf7 100644
--- a/cms/server/contest/submission/workflow.py
+++ b/cms/server/contest/submission/workflow.py
@@ -205,7 +205,7 @@ def accept_submission(
logger.info(f'Submission rejected: {err}')
raise UnacceptableSubmission(
N_("Invalid submission format!"),
- N_("Please select the correct files."))
+ N_("Please select the correct file and ensure that it has the correct file extension. For example, in C++, you should submit a .cpp file."))
digests: dict[str, str] = dict()
missing_codenames = required_codenames.difference(files.keys())
@@ -218,7 +218,7 @@ def accept_submission(
else:
raise UnacceptableSubmission(
N_("Invalid submission format!"),
- N_("Please select the correct files."))
+ N_("Please select the correct file and ensure that it has the correct file extension. For example, in C++, you should submit a .cpp file."))
if any(
len(content) > config.contest_web_server.max_submission_length
diff --git a/cmscontrib/loaders/__init__.py b/cmscontrib/loaders/__init__.py
index 4188da3dbb..7ca8516f6f 100644
--- a/cmscontrib/loaders/__init__.py
+++ b/cmscontrib/loaders/__init__.py
@@ -22,6 +22,7 @@
from .italy_yaml import YamlLoader
from .polygon import PolygonTaskLoader, PolygonUserLoader, PolygonContestLoader
from .tps import TpsTaskLoader
+from .ctf import CtfTaskLoader
LOADERS: dict[str, type[BaseLoader]] = dict(
@@ -32,6 +33,7 @@
PolygonUserLoader,
PolygonContestLoader,
TpsTaskLoader,
+ CtfTaskLoader,
]
)
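
LOADERS maps each loader's short_name to its class, so the CTF format becomes selectable by name from the import scripts (the new loader registers itself as "ctf_task"). A rough sketch of the lookup, with the helper name being illustrative rather than the actual import-script code:

    # Illustrative only: resolve a loader class from the registry by name.
    def resolve_loader(loaders, requested):
        try:
            return loaders[requested]
        except KeyError:
            raise ValueError(f"unknown loader {requested!r}; "
                             f"available: {sorted(loaders)}")
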
diff --git a/cmscontrib/loaders/ctf.py b/cmscontrib/loaders/ctf.py
new file mode 100644
index 0000000000..0bbc7be47a
--- /dev/null
+++ b/cmscontrib/loaders/ctf.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# Programming contest management system
+# Copyright © 2017 Kiarash Golezardi
+# Copyright © 2017 Amir Keivan Mohtashami
+# Copyright © 2018 Stefano Maggiolo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import io
+import json
+import logging
+import os
+import re
+import subprocess
+import zipfile
+
+from datetime import timedelta
+from tempfile import TemporaryDirectory
+
+from cms.db import Task, Dataset, Manager, Testcase, Attachment, Statement
+
+from .base_loader import TaskLoader
+
+from cmscommon.constants import SCORE_MODE_MAX_SUBTASK
+
+
+logger = logging.getLogger(__name__)
+
+
+def make_timedelta(t):
+ return timedelta(seconds=t)
+
+
+class CtfTaskLoader(TaskLoader):
+ """Loader for CTF formatted tasks.
+
+ """
+
+ short_name = 'ctf_task'
+ description = 'CTF task format'
+
+ @staticmethod
+ def detect(path):
+ """See docstring in class Loader.
+
+ """
+ return os.path.exists(os.path.join(path, "metadata.json"))
+
+ def task_has_changed(self):
+ """See docstring in class Loader.
+
+ """
+ return True
+
+ def _get_task_type_parameters_oo(self, data, evaluation_param):
+ return [
+ evaluation_param
+ ]
+
+ def _get_task_type_parameters_comm(self, data, has_grader):
+ return [
+ data["num_processes"],
+ "stub" if has_grader else "alone",
+ "fifo_io", # TODO: Support "std_io" as well
+ ]
+
+ def _get_task_type_parameters_batch(self, data, evaluation_param, has_grader):
+ return [
+ # alone: Self-sufficient
+ # grader: Compiled with grader
+ "grader" if has_grader else "alone",
+ [
+ data['input_file'] if 'input_file' in data else '',
+ data['output_file'] if 'output_file' in data else '',
+ ],
+ evaluation_param
+ ]
+
+ def get_task(self, get_statement=True):
+ """See docstring in class Loader.
+
+ """
+
+ json_src = os.path.join(self.path, 'metadata.json')
+ if not os.path.exists(json_src):
+ logger.critical('No task found.')
+ raise IOError('No task found at path %s' % json_src)
+
+ with io.open(json_src, 'rt', encoding='utf-8') as json_file:
+ data = json.load(json_file)
+ if 'cms' in data:
+ cms_specific_data = data['cms']
+ logger.info("%s", str(cms_specific_data))
+ else:
+ cms_specific_data = {}
+
+ short_name = data['short_name']
+ logger.info("Loading parameters for task %s.", short_name)
+
+ ## Args for Task object
+ args = {}
+
+ # TODO: We should probably use a friendlier name
+ args["name"] = cms_specific_data['name'] if 'name' in cms_specific_data else short_name
+ args["title"] = data['problem_name']
+
+ # Statements
+ if get_statement:
+ logger.info('Statement requested')
+
+ # Just pick english as the primary language
+ args['statements'] = dict()
+ args["primary_statements"] = ["en"]
+ digest = self.file_cacher.put_file_from_path(
+ os.path.join(self.path, 'statement.pdf'),
+ "Statement for task %s" % (short_name,))
+ args['statements']["en"] = Statement("en", digest)
+
+ # Attachments
+ args["attachments"] = dict()
+ attachments_dir = os.path.join(self.path, 'attachments')
+ if os.path.exists(attachments_dir):
+ logger.info("Attachments found")
+ for filename in sorted(os.listdir(attachments_dir)):
+ digest = self.file_cacher.put_file_from_path(
+ os.path.join(attachments_dir, filename),
+ "Attachment %s for task %s" % (filename, short_name))
+ args["attachments"][filename] = Attachment(filename, digest)
+
+ # Obtain the testcase codenames by extracting the data archive
+ # into a temporary directory
+ td = TemporaryDirectory()
+
+ with zipfile.ZipFile(os.path.join(self.path, 'data.zip'), 'r') as zip_ref:
+ zip_ref.extractall(td.name)
+
+ testcase_codenames = sorted([
+ filename[:-3]
+ for filename in os.listdir(td.name)
+ if filename[-3:] == '.in'])
+
+ if "task_type" in data and data["task_type"] == "output_only":
+ args["submission_format"] = ["output_%s.txt" % (cn,) for cn in testcase_codenames]
+ else:
+ args["submission_format"] = ["%s.%%l" % args["name"]]
+
+ # These options cannot be configured in the CTF format.
+ # Uncomment the following to set specific values for them.
+
+ # No user tests for AIO
+ # args['max_user_test_number'] = 10
+ # args['min_user_test_interval'] = make_timedelta(60)
+
+ # No tokens for AIO
+ # args['token_mode'] = 'infinite'
+ # args['token_max_number'] = 100
+ # args['token_min_interval'] = make_timedelta(60)
+ # args['token_gen_initial'] = 1
+ # args['token_gen_number'] = 1
+ # args['token_gen_interval'] = make_timedelta(1800)
+ # args['token_gen_max'] = 2
+
+ # Takes best score for each subtask
+ args['score_mode'] = SCORE_MODE_MAX_SUBTASK
+
+ # Unlimited submissions per problem
+ # args['max_submission_number'] = 50
+ # args['max_user_test_number'] = 50
+
+ # 60 seconds between submissions
+ args['min_submission_interval'] = make_timedelta(60)
+
+ args['score_precision'] = 2
+
+ args['feedback_level'] = 'restricted'
+
+ task = Task(**args)
+
+ # Args for test data
+ args = dict()
+
+ args["task"] = task
+ args["description"] = "Default" # Default dataset
+ args["autojudge"] = True
+
+ MB_TO_BYTES = 1024*1024
+ if "task_type" not in data or data["task_type"] != "output_only":
+ if "timelimit" in cms_specific_data:
+ args["time_limit"] = float(cms_specific_data['timelimit'])
+ else:
+ args["time_limit"] = float(data['timelimit'])
+ if "memlimit" in cms_specific_data:
+ args["memory_limit"] = int(cms_specific_data['memlimit'])*MB_TO_BYTES
+ else:
+ args["memory_limit"] = int(data['memlimit'])*MB_TO_BYTES
+
+ args["managers"] = {}
+
+ # Graders
+ has_grader_files = False
+ managers_dir = os.path.join(self.path, 'managers')
+ logger.info("Now for manager files {}".format(managers_dir))
+ if os.path.exists(managers_dir):
+ for manager_file in os.listdir(managers_dir):
+ manager_file_path = os.path.join(managers_dir, manager_file)
+ # Directories in managers/ are checker sources, so ignore.
+ if os.path.isfile(manager_file_path):
+ logger.info("Found manager file {}".format(manager_file_path))
+ # We can make this assumption because the only non-grader
+ # manager files are checkers, which are not handled in this
+ # if statement
+ has_grader_files = True
+ manager_digest = self.file_cacher.put_file_from_path(
+ manager_file_path,
+ "Manager file %s for task %s" % (manager_file, short_name))
+ args["managers"][manager_file] = Manager(manager_file, manager_digest)
+
+ # Checker
+ # Unlike grader files, we have to compile the checker from source
+ checker_dir = os.path.join(self.path, 'managers', 'checker')
+ if os.path.isdir(checker_dir):
+ evaluation_param = "comparator"
+ subprocess.run(["make", "-C", checker_dir, "clean"])
+ subprocess.run(["make", "-C", checker_dir, "all"])
+ checker_digest = self.file_cacher.put_file_from_path(
+ os.path.join(checker_dir, "bin", "checker"),
+ "Checker for task %s" % (short_name,))
+ args["managers"]["checker"] = Manager("checker", checker_digest)
+ else:
+ evaluation_param = "diff"
+
+ # Manager
+ # Unlike grader files, we have to compile the manager from source
+ manager_dir = os.path.join(self.path, 'managers', 'manager')
+ if os.path.isdir(manager_dir):
+ subprocess.run(["make", "-C", manager_dir, "clean"])
+ subprocess.run(["make", "-C", manager_dir, "all"])
+ manager_digest = self.file_cacher.put_file_from_path(
+ os.path.join(manager_dir, "bin", "manager"),
+ "Manager for task %s" % (short_name,))
+ args["managers"]["manager"] = Manager("manager", manager_digest)
+
+ # Note that the original TPS worked with custom task type Batch2017
+ # and Communication2017 instead of Batch and Communication.
+ if "task_type" in data and data["task_type"] == "communication":
+ args["task_type"] = "Communication"
+ args["task_type_parameters"] = self._get_task_type_parameters_comm(
+ data, has_grader_files)
+ elif "task_type" in data and data["task_type"] == "output_only":
+ args["task_type"] = "OutputOnly"
+ args["task_type_parameters"] = self._get_task_type_parameters_oo(
+ data, evaluation_param)
+ else:
+ args["task_type"] = "Batch"
+ args["task_type_parameters"] = self._get_task_type_parameters_batch(
+ data, evaluation_param, has_grader_files)
+
+ # Manager (for Communication tasks)
+ # TODO: Add support for getting the manager
+
+ # Testcases
+ args["testcases"] = {}
+
+ # Finally, upload testcases
+ for codename in testcase_codenames:
+ infile = os.path.join(td.name, "%s.in" % codename)
+ outfile = os.path.join(td.name, "%s.out" % codename)
+ if not os.path.exists(outfile):
+ logger.critical(
+ 'Could not find the output file for testcase %s', codename)
+ logger.critical('Aborting...')
+ return
+
+ input_digest = self.file_cacher.put_file_from_path(
+ infile,
+ "Input %s for task %s" % (codename, short_name))
+ output_digest = self.file_cacher.put_file_from_path(
+ outfile,
+ "Output %s for task %s" % (codename, short_name))
+ testcase = Testcase(codename, True,
+ input_digest, output_digest)
+ args["testcases"][codename] = testcase
+
+ # Score Type
+ cms_spec_path = os.path.join(self.path, 'cms_spec')
+ if not os.path.exists(cms_spec_path):
+ logger.critical('Could not find CMS spec. Aborting...')
+ return
+ with io.open(cms_spec_path, 'rt', encoding='utf-8') as f:
+ cms_spec_string = f.read()
+
+ # TODO: Support other score types
+ args["score_type"] = "GroupMin"
+ args["score_type_parameters"] = json.loads(cms_spec_string)
+
+ dataset = Dataset(**args)
+ task.active_dataset = dataset
+
+ logger.info("Task parameters loaded.")
+
+ return task
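
For reference, a hypothetical metadata.json consistent with the keys get_task() reads above; the values are invented. Alongside it the loader expects statement.pdf, a data.zip of .in/.out pairs, optional managers/ and attachments/ directories, and a cms_spec file holding the GroupMin score-type parameters.

    # Hypothetical metadata.json contents for a Batch task (keys as read by
    # CtfTaskLoader.get_task(); values are illustrative only).
    example_metadata = {
        "short_name": "addition",
        "problem_name": "Simple Addition",
        "timelimit": 1.0,    # seconds
        "memlimit": 256,     # MiB
        "input_file": "",    # empty means stdin/stdout
        "output_file": "",
        # "task_type" may be "communication" or "output_only";
        # anything else (or leaving it out) means Batch.
        "cms": {             # optional CMS-specific overrides
            "name": "addition",
            "timelimit": 2.0,
            "memlimit": 512,
        },
    }
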
diff --git a/cmscontrib/loaders/polygon.py b/cmscontrib/loaders/polygon.py
index 29030fc994..d32dd175e3 100644
--- a/cmscontrib/loaders/polygon.py
+++ b/cmscontrib/loaders/polygon.py
@@ -22,6 +22,7 @@
import logging
import os
+import json
import shutil
import subprocess
import xml.etree.ElementTree as ET
@@ -34,7 +35,8 @@
from cms.db import Contest, User, Task, Statement, Dataset, Manager, Testcase
from cmscommon.crypto import build_password
from cmscontrib import touch
-from .base_loader import ContestLoader, TaskLoader, UserLoader, LANGUAGE_MAP
+from .base_loader import ContestLoader, TaskLoader, UserLoader
+from cmscommon.constants import SCORE_MODE_MAX_SUBTASK
logger = logging.getLogger(__name__)
@@ -43,6 +45,69 @@
def make_timedelta(t):
return timedelta(seconds=t)
+LANGUAGE_MAP = {
+ 'afrikaans': 'af',
+ 'arabic': 'ar',
+ 'armenian': 'hy',
+ 'azerbaijani': 'az',
+ 'belarusian': 'be',
+ 'bengali': 'bn',
+ 'bosnian': 'bs',
+ 'bulgarian': 'bg',
+ 'catalan': 'ca',
+ 'chinese': 'zh',
+ 'croatian': 'hr',
+ 'czech': 'cs',
+ 'danish': 'da',
+ 'dutch': 'nl',
+ 'english': 'en',
+ 'estonian': 'et',
+ 'filipino': 'fil',
+ 'finnish': 'fi',
+ 'french': 'fr',
+ 'georgian': 'ka',
+ 'german': 'de',
+ 'greek': 'el',
+ 'hebrew': 'he',
+ 'hindi': 'hi',
+ 'hungarian': 'hu',
+ 'icelandic': 'is',
+ 'indonesian': 'id',
+ 'irish': 'ga',
+ 'italian': 'it',
+ 'japanese': 'ja',
+ 'kazakh': 'kk',
+ 'korean': 'ko',
+ 'kyrgyz': 'ky',
+ 'latvian': 'lv',
+ 'lithuanian': 'lt',
+ 'macedonian': 'mk',
+ 'malay': 'ms',
+ 'mongolian': 'mn',
+ 'norwegian': 'no',
+ 'persian': 'fa',
+ 'polish': 'pl',
+ 'portuguese': 'pt',
+ 'romanian': 'ro',
+ 'russian': 'ru',
+ 'serbian': 'sr',
+ 'sinhala': 'si',
+ 'slovak': 'sk',
+ 'slovene': 'sl',
+ 'spanish': 'es',
+ 'swedish': 'sv',
+ 'tajik': 'tg',
+ 'tamil': 'ta',
+ 'thai': 'th',
+ 'turkish': 'tr',
+ 'turkmen': 'tk',
+ 'ukrainian': 'uk',
+ 'urdu': 'ur',
+ 'uzbek': 'uz',
+ 'vietnamese': 'vi',
+ 'other': 'other',
+}
+
class PolygonTaskLoader(TaskLoader):
"""Load a task stored using the Codeforces Polygon format.
@@ -143,6 +208,10 @@ def get_task(self, get_statement=True):
# args['token_gen_number'] = 1
# args['token_gen_interval'] = make_timedelta(1800)
# args['token_gen_max'] = 2
+ args['score_mode'] = SCORE_MODE_MAX_SUBTASK
+ args['feedback_level'] = 'restricted'
+ args['min_submission_interval'] = make_timedelta(60)
+
task_cms_conf_path = os.path.join(self.path, 'files', 'cms_conf.py')
task_cms_conf = None
@@ -214,17 +283,14 @@ def get_task(self, get_statement=True):
args["task_type_parameters"] = \
["alone", [infile_param, outfile_param], evaluation_param]
- args["score_type"] = "Sum"
+ args["score_type"] = "GroupMin"
+ args["score_type_parameters"] = json.loads('[[100, ".*", "Test Cases"]]')
+
total_value = 100.0
input_value = 0.0
testcases = int(testset.find('test-count').text)
- n_input = testcases
- if n_input != 0:
- input_value = total_value / n_input
- args["score_type_parameters"] = input_value
-
args["testcases"] = {}
for i in range(testcases):
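
With these defaults every Polygon-imported task gets a single 100-point GroupMin group named "Test Cases" covering all testcases (".*"), best-subtask score mode, restricted feedback, and a 60-second minimum interval between submissions. A sketch of how GroupMin scores such a group, following its minimum-over-the-group semantics:

    # GroupMin awards max_score * min(outcome over the group's testcases),
    # so with a single group over all tests any failed test zeroes the task.
    outcomes = [1.0, 1.0, 0.0, 1.0]    # hypothetical per-testcase outcomes
    group_max = 100
    score = group_max * min(outcomes)  # 0.0
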
diff --git a/cmscontrib/loaders/polygon/testlib.h b/cmscontrib/loaders/polygon/testlib.h
index 612a439a9d..31669c8dc3 100644
--- a/cmscontrib/loaders/polygon/testlib.h
+++ b/cmscontrib/loaders/polygon/testlib.h
@@ -42,22 +42,46 @@
*/
/*
- * Artem Iglikov
- * Alexander Kernozhitsky
- * Andrey Vihrov
+ * Kian Mirjalali:
*
- * Modifications for Contest Management System (CMS) support:
- * - Write checker outcome to stdout and message to stderr
- * - Use special localizable message strings by default
- * - Adjust checker exit code
- * - Adjust checker argument order (except in help and comments)
- * - Add the "CMS" conditional macro for CMS checker format
- * - Add the "CMS_VERBOSE_FEEDBACK" conditional macro to enable
- * testlib-style messages
- * - Interactors are not supported
+ * Modified to be compatible with CMS & requirements for preparing IOI tasks
+ *
+ * * defined FOR_LINUX in order to force linux-based line endings in validators.
+ *
+ * * Changed the ordering of checker arguments
+ * from <input-file> <output-file> <answer-file>
+ * to <input-file> <answer-file> <output-file>
+ *
+ * * Added "Security Violation" & "Protocol Violation" as new result types.
+ *
+ * * Changed checker quit behaviors to make it compliant with CMS.
+ *
+ * * The checker exit codes should always be 0 in CMS.
+ *
+ * * For partial scoring, forced quitp() functions to accept only scores in the range [0,1].
+ * If the partial score is less than 1e-5, it becomes 1e-5, because 0 grades are considered wrong in CMS.
+ * Grades in range [1e-5, 0.0001) are printed exactly (to prevent rounding to zero).
+ * Grades in [0.0001, 1] are printed with 4 digits after decimal point.
+ *
+ * * Added the following utility types/variables/functions/methods:
+ * type HaltListener (as a function with no parameters or return values)
+ * vector<HaltListener> __haltListeners
+ * void registerHaltListener(HaltListener haltListener)
+ * void callHaltListeners() (which is called in quit)
+ * void closeOnHalt(FILE* file)
+ * void closeOnHalt(F& f) (template using f.close())
+ * void InStream::readSecret(string secret, TResult mismatchResult, string mismatchMessage)
+ * void InStream::readGraderResult()
+ * +supporting conversion of graderResult to CMS result
+ * void quitp(double), quitp(int)
+ * void registerChecker(string probName, argc, argv)
+ * void readBothSecrets(string secret)
+ * void readBothGraderResults()
+ * void quit(TResult)
+ * bool compareTokens(string a, string b, char separator=' ')
+ * void compareRemainingLines(int lineNo=1)
+ * void skip_ok()
*
- * Backports:
- * - 1bcfacc3a97667b38a3aef9d95b97edc6a9db688 (MikeMirzayanov/testlib#79)
*/
/* NOTE: This file contains testlib library for C++.
@@ -82,7 +106,6 @@
*/
const char* latestFeatures[] = {
- "Fixed issue #79: fixed missed guard against repeated header include",
"Fixed stringstream repeated usage issue",
"Fixed compilation in g++ (for std=c++03)",
"Batch of println functions (support collections, iterator ranges)",
@@ -154,6 +177,8 @@ const char* latestFeatures[] = {
"Added compatibility with Contester (compile with CONTESTER directive)"
};
+#define FOR_LINUX
+
#ifdef _MSC_VER
#define _CRT_SECURE_NO_DEPRECATE
#define _CRT_SECURE_NO_WARNINGS
@@ -168,6 +193,7 @@ const char* latestFeatures[] = {
#include
#undef random
+#include <functional>
#include
#include
#include
@@ -275,6 +301,14 @@ const char* latestFeatures[] = {
# define UNEXPECTED_EOF_EXIT_CODE 8
#endif
+#ifndef SV_EXIT_CODE
+# define SV_EXIT_CODE 20
+#endif
+
+#ifndef PV_EXIT_CODE
+# define PV_EXIT_CODE 21
+#endif
+
#ifndef PC_BASE_EXIT_CODE
# ifdef TESTSYS
# define PC_BASE_EXIT_CODE 50
@@ -304,7 +338,40 @@ const char* latestFeatures[] = {
#else
# define NORETURN
#endif
-
+
+/**************** HaltListener material ****************/
+#if __cplusplus > 199711L || defined(_MSC_VER)
+typedef std::function<void()> HaltListener;
+#else
+typedef void (*HaltListener)();
+#endif
+
+std::vector<HaltListener> __haltListeners;
+
+inline void registerHaltListener(HaltListener haltListener) {
+ __haltListeners.push_back(haltListener);
+}
+
+inline void callHaltListeners() {
+ // Removing and calling haltListeners in reverse order.
+ while (!__haltListeners.empty()) {
+ HaltListener haltListener = __haltListeners.back();
+ __haltListeners.pop_back();
+ haltListener();
+ }
+}
+
+#if __cplusplus > 199711L || defined(_MSC_VER)
+inline void closeOnHalt(FILE* file) {
+ registerHaltListener([file] { fclose(file); });
+}
+template <typename F>
+inline void closeOnHalt(F& f) {
+ registerHaltListener([&f] { f.close(); });
+}
+#endif
+/*******************************************************/
+
static char __testlib_format_buffer[16777216];
static int __testlib_format_buffer_usage_count = 0;
@@ -1422,6 +1489,8 @@ enum TResult
_dirt = 4,
_points = 5,
_unexpected_eof = 8,
+ _sv = 10,
+ _pv = 11,
_partially = 16
};
@@ -1448,8 +1517,8 @@ const std::string outcomes[] = {
"reserved",
"unexpected-eof",
"reserved",
- "reserved",
- "reserved",
+ "security-violation",
+ "protocol-violation",
"reserved",
"reserved",
"reserved",
@@ -1919,6 +1988,8 @@ struct InStream
std::vector<int> readIntegers(int size, int minv, int maxv, const std::string& variablesName = "", int indexBase = 1);
/* Reads space-separated sequence of integers. */
std::vector<int> readInts(int size, int minv, int maxv, const std::string& variablesName = "", int indexBase = 1);
+ /* Reads space-separated sequence of integers. */
+ std::vector<int> readInts(int size, int indexBase = 1);
/*
* Reads new double. Ignores white-spaces into the non-strict mode
@@ -2018,6 +2089,13 @@ struct InStream
* input/answer streams replace any result to FAIL.
*/
NORETURN void quitf(TResult result, const char* msg, ...);
+
+ /*
+ * Quit-functions aborts program with and :
+ * input/answer streams replace any result to FAIL.
+ */
+ void quitif(bool condition, TResult result, const char *msg, ...);
+
/*
* Quit-functions aborts program with <result> and <message>:
* input/answer streams replace any result to FAIL.
@@ -2043,15 +2121,20 @@ struct InStream
const static WORD LightCyan = 0x0b;
const static WORD LightGreen = 0x0a;
const static WORD LightYellow = 0x0e;
+ const static WORD LightMagenta = 0x0d;
static void textColor(WORD color);
static void quitscr(WORD color, const char* msg);
static void quitscrS(WORD color, std::string msg);
void xmlSafeWrite(std::FILE * file, const char* msg);
+ void readSecret(std::string secret, TResult mismatchResult=_pv, std::string mismatchMessage="Secret mismatch");
+ void readGraderResult();
+
private:
InStream(const InStream&);
InStream& operator =(const InStream&);
+ void quitByGraderResult(TResult result, std::string defaultMessage);
};
InStream inf;
@@ -2373,6 +2456,8 @@ __attribute__((const))
#endif
int resultExitCode(TResult r)
{
+ if (testlibMode == _checker)
+ return 0;  // CMS checkers should always finish with a zero exit code.
if (r == _ok)
return OK_EXIT_CODE;
if (r == _wa)
@@ -2391,6 +2476,10 @@ int resultExitCode(TResult r)
#else
return PE_EXIT_CODE;
#endif
+ if (r == _sv)
+ return SV_EXIT_CODE;
+ if (r == _pv)
+ return PV_EXIT_CODE;
if (r >= _partially)
return PC_BASE_EXIT_CODE + (r - _partially);
return FAIL_EXIT_CODE;
@@ -2424,6 +2513,9 @@ void InStream::textColor(
case LightYellow:
fprintf(stderr, "\033[1;33m");
break;
+ case LightMagenta:
+ fprintf(stderr, "\033[1;35m");
+ break;
case LightGray:
default:
fprintf(stderr, "\033[0m");
@@ -2434,6 +2526,7 @@ void InStream::textColor(
NORETURN void halt(int exitCode)
{
+ callHaltListeners();
#ifdef FOOTER
InStream::textColor(InStream::LightGray);
std::fprintf(stderr, "Checker: \"%s\"\n", checkerName.c_str());
@@ -2448,6 +2541,14 @@ static bool __testlib_shouldCheckDirt(TResult result)
return result == _ok || result == _points || result >= _partially;
}
+
+std::string RESULT_MESSAGE_CORRECT = "Output is correct";
+std::string RESULT_MESSAGE_PARTIALLY_CORRECT = "Output is partially correct";
+std::string RESULT_MESSAGE_WRONG = "Output isn't correct";
+std::string RESULT_MESSAGE_SECURITY_VIOLATION = "Security Violation";
+std::string RESULT_MESSAGE_PROTOCOL_VIOLATION = "Protocol Violation";
+std::string RESULT_MESSAGE_FAIL = "Judge Failure; Contact staff!";
+
NORETURN void InStream::quit(TResult result, const char* msg)
{
if (TestlibFinalizeGuard::alive)
@@ -2488,97 +2589,96 @@ NORETURN void InStream::quit(TResult result, const char* msg)
int pctype = result - _partially;
bool isPartial = false;
-#ifdef CMS
- inf.close();
- ouf.close();
- ans.close();
- if (tout.is_open())
- tout.close();
-
-# define CMS_SUCCESS "success"
-# define CMS_PARTIAL "partial"
-# define CMS_WRONG "wrong"
-# ifndef CMS_VERBOSE_FEEDBACK
-# define CMS_MSG(code, text) "translate:" code "\n"
-# else
-# define CMS_MSG(code, text) text " %s\n", msg
-# endif
-
- if (result == _ok) {
- std::fprintf(stdout, "1.0\n");
- std::fprintf(stderr, CMS_MSG(CMS_SUCCESS, "OK"));
- } else if (result == _wa) {
- std::fprintf(stdout, "0.0\n");
- std::fprintf(stderr, CMS_MSG(CMS_WRONG, "Wrong Answer"));
- } else if (result == _pe) {
- std::fprintf(stdout, "0.0\n");
- std::fprintf(stderr, CMS_MSG(CMS_WRONG, "Presentation Error"));
- } else if (result == _dirt) {
- std::fprintf(stdout, "0.0\n");
- std::fprintf(stderr, CMS_MSG(CMS_WRONG, "Wrong Output Format"));
- } else if (result == _points) {
- std::string stringPoints(removeDoubleTrailingZeroes(
- format("%.10f", __testlib_points)));
- std::fprintf(stdout, "%s\n", stringPoints.c_str());
- std::fprintf(stderr, CMS_MSG(CMS_PARTIAL, "Partial Score"));
- } else if (result == _unexpected_eof) {
- std::fprintf(stdout, "0.0\n");
- std::fprintf(stderr, CMS_MSG(CMS_WRONG, "Unexpected EOF"));
- } else if (result >= _partially) {
- double score = (double)pctype / 200.0;
- std::fprintf(stdout, "%.3f\n", score);
- std::fprintf(stderr, CMS_MSG(CMS_PARTIAL, "Partial Score"));
- } else if (result == _fail) {
- std::fprintf(stderr, "FAIL %s\n", msg);
- halt(1);
+ if (testlibMode == _checker) {
+ WORD color;
+ std::string pointsStr = "0";
+ switch (result)
+ {
+ case _ok:
+ pointsStr = format("%d", 1);
+ color = LightGreen;
+ errorName = RESULT_MESSAGE_CORRECT;
+ break;
+ case _wa:
+ case _pe:
+ case _dirt:
+ case _unexpected_eof:
+ color = LightRed;
+ errorName = RESULT_MESSAGE_WRONG;
+ break;
+ case _fail:
+ color = LightMagenta;
+ errorName = RESULT_MESSAGE_FAIL;
+ break;
+ case _sv:
+ color = LightMagenta;
+ errorName = RESULT_MESSAGE_SECURITY_VIOLATION;
+ break;
+ case _pv:
+ color = LightMagenta;
+ errorName = RESULT_MESSAGE_PROTOCOL_VIOLATION;
+ break;
+ case _points:
+ if (__testlib_points < 1e-5)
+ pointsStr = "0.00001"; // Prevent zero scores in CMS as zero is considered wrong
+ else if (__testlib_points < 0.0001)
+ pointsStr = format("%lf", __testlib_points); // Prevent rounding the numbers below 0.0001
+ else
+ pointsStr = format("%.4lf", __testlib_points);
+ color = LightYellow;
+ errorName = RESULT_MESSAGE_PARTIALLY_CORRECT;
+ break;
+ default:
+ if (result >= _partially)
+ quit(_fail, "testlib partially mode not supported");
+ else
+ quit(_fail, "What is the code ??? ");
+ }
+ std::fprintf(stdout, "%s\n", pointsStr.c_str());
+ quitscrS(color, errorName);
+ std::fprintf(stderr, "\n");
} else {
- std::fprintf(stderr, "FAIL unknown result %d\n", (int)result);
- halt(1);
- }
-
- halt(0);
-#endif
-
- switch (result)
- {
- case _ok:
- errorName = "ok ";
- quitscrS(LightGreen, errorName);
- break;
- case _wa:
- errorName = "wrong answer ";
- quitscrS(LightRed, errorName);
- break;
- case _pe:
- errorName = "wrong output format ";
- quitscrS(LightRed, errorName);
- break;
- case _fail:
- errorName = "FAIL ";
- quitscrS(LightRed, errorName);
- break;
- case _dirt:
- errorName = "wrong output format ";
- quitscrS(LightCyan, errorName);
- result = _pe;
- break;
- case _points:
- errorName = "points ";
- quitscrS(LightYellow, errorName);
- break;
- case _unexpected_eof:
- errorName = "unexpected eof ";
- quitscrS(LightCyan, errorName);
- break;
- default:
- if (result >= _partially)
+ switch (result)
{
- errorName = format("partially correct (%d) ", pctype);
- isPartial = true;
+ case _ok:
+ errorName = "ok ";
+ quitscrS(LightGreen, errorName);
+ break;
+ case _wa:
+ errorName = "wrong answer ";
+ quitscrS(LightRed, errorName);
+ break;
+ case _pe:
+ errorName = "wrong output format ";
+ quitscrS(LightRed, errorName);
+ break;
+ case _fail:
+ errorName = "FAIL ";
+ quitscrS(LightRed, errorName);
+ break;
+ case _dirt:
+ errorName = "wrong output format ";
+ quitscrS(LightCyan, errorName);
+ result = _pe;
+ break;
+ case _points:
+ errorName = "points ";
quitscrS(LightYellow, errorName);
+ break;
+ case _unexpected_eof:
+ errorName = "unexpected eof ";
+ quitscrS(LightCyan, errorName);
+ break;
+ default:
+ if (result >= _partially)
+ {
+ errorName = format("partially correct (%d) ", pctype);
+ isPartial = true;
+ quitscrS(LightYellow, errorName);
+ }
+ else
+ quit(_fail, "What is the code ??? ");
}
- else
- quit(_fail, "What is the code ??? ");
}
if (resultName != "")
@@ -2638,6 +2738,16 @@ NORETURN void InStream::quitf(TResult result, const char* msg, ...)
InStream::quit(result, message.c_str());
}
+#ifdef __GNUC__
+__attribute__ ((format (printf, 4, 5)))
+#endif
+void InStream::quitif(bool condition, TResult result, const char *msg, ...) {
+ if (condition) {
+ FMT_TO_RESULT(msg, msg, message);
+ InStream::quit(result, message.c_str());
+ }
+}
+
NORETURN void InStream::quits(TResult result, std::string msg)
{
InStream::quit(result, msg.c_str());
@@ -3404,6 +3514,10 @@ std::vector<int> InStream::readInts(int size, int minv, int maxv, const std::str
__testlib_readMany(readInts, readInt(minv, maxv, variablesName), int, true)
}
+std::vector<int> InStream::readInts(int size, int indexBase) {
+ __testlib_readMany(readInts, readInt(), int, true)
+}
+
std::vector<int> InStream::readIntegers(int size, int minv, int maxv, const std::string& variablesName, int indexBase)
{
__testlib_readMany(readIntegers, readInt(minv, maxv, variablesName), int, true)
@@ -3824,8 +3938,15 @@ NORETURN void quit(TResult result, const char* msg)
ouf.quit(result, msg);
}
+#ifdef __GNUC__
+__attribute__ ((format (printf, 2, 3)))
+#endif
+NORETURN void quitf(TResult result, const char* format, ...);
+
NORETURN void __testlib_quitp(double points, const char* message)
{
+ if (points<0 || points>1)
+ quitf(_fail, "wrong points: %lf, it must be in [0,1]", points);
__testlib_points = points;
std::string stringPoints = removeDoubleTrailingZeroes(format("%.10f", points));
@@ -3833,13 +3954,15 @@ NORETURN void __testlib_quitp(double points, const char* message)
if (NULL == message || 0 == strlen(message))
quitMessage = stringPoints;
else
- quitMessage = stringPoints + " " + message;
+ quitMessage = message;
quit(_points, quitMessage.c_str());
}
NORETURN void __testlib_quitp(int points, const char* message)
{
+ if (points<0 || points>1)
+ quitf(_fail, "wrong points: %d, it must be in [0,1]", points);
__testlib_points = points;
std::string stringPoints = format("%d", points);
@@ -3847,7 +3970,7 @@ NORETURN void __testlib_quitp(int points, const char* message)
if (NULL == message || 0 == strlen(message))
quitMessage = stringPoints;
else
- quitMessage = stringPoints + " " + message;
+ quitMessage = message;
quit(_points, quitMessage.c_str());
}
@@ -3882,6 +4005,13 @@ NORETURN void quitp(F points, const char* format, ...)
quitp(points, message);
}
+template <typename F>
+NORETURN void quitp(F points)
+{
+ __testlib_quitp(points, std::string(""));
+}
+
+
#ifdef __GNUC__
__attribute__ ((format (printf, 2, 3)))
#endif
@@ -3920,7 +4050,7 @@ NORETURN void __testlib_help()
std::fprintf(stderr, "\n");
std::fprintf(stderr, "Program must be run with the following arguments: \n");
- std::fprintf(stderr, " [ [<-appes>]]\n\n");
+ std::fprintf(stderr, " [ [<-appes>]]\n\n");
std::exit(FAIL_EXIT_CODE);
}
@@ -3993,10 +4123,6 @@ void registerGen(int argc, char* argv[])
void registerInteraction(int argc, char* argv[])
{
-#ifdef CMS
- quit(_fail, "Interactors are not supported");
-#endif
-
__testlib_ensuresPreconditions();
testlibMode = _interactor;
@@ -4125,7 +4251,7 @@ void registerTestlibCmd(int argc, char* argv[])
if (argc < 4 || argc > 6)
{
quit(_fail, std::string("Program must be run with the following arguments: ") +
- std::string(" [ [<-appes>]]") +
+ std::string(" [ [<-appes>]]") +
"\nUse \"--help\" to get help information");
}
@@ -4146,7 +4272,7 @@ void registerTestlibCmd(int argc, char* argv[])
if (strcmp("-APPES", argv[5]) && strcmp("-appes", argv[5]))
{
quit(_fail, std::string("Program must be run with the following arguments: ") +
- " [ [<-appes>]]");
+ " [ [<-appes>]]");
}
else
{
@@ -4156,20 +4282,15 @@ void registerTestlibCmd(int argc, char* argv[])
}
inf.init(argv[1], _input);
-#ifdef CMS
- ouf.init(argv[3], _output);
ans.init(argv[2], _answer);
-#else
- ouf.init(argv[2], _output);
- ans.init(argv[3], _answer);
-#endif
+ ouf.init(argv[3], _output);
}
void registerTestlib(int argc, ...)
{
if (argc < 3 || argc > 5)
quit(_fail, std::string("Program must be run with the following arguments: ") +
- " [ [<-appes>]]");
+ " [ [<-appes>]]");
char** argv = new char*[argc + 1];
@@ -4568,6 +4689,8 @@ NORETURN void expectedButFound(TResult result, long double expected
__testlib_expectedButFound(result, double(expected), double(found), prepend.c_str());
}
+#endif
+
#if __cplusplus > 199711L || defined(_MSC_VER)
template
struct is_iterable
@@ -4757,4 +4880,124 @@ void println(const A& a, const B& b, const C& c, const D& d, const E& e, const F
std::cout << std::endl;
}
#endif
-#endif
+
+
+
+void registerChecker(std::string probName, int argc, char* argv[])
+{
+ setName("checker for problem %s", probName.c_str());
+ registerTestlibCmd(argc, argv);
+}
+
+
+
+const std::string _grader_OK = "OK";
+const std::string _grader_SV = "SV";
+const std::string _grader_PV = "PV";
+const std::string _grader_WA = "WA";
+const std::string _grader_FAIL = "FAIL";
+
+
+void InStream::readSecret(std::string secret, TResult mismatchResult, std::string mismatchMessage)
+{
+ if (readWord() != secret)
+ quits(mismatchResult, mismatchMessage);
+ eoln();
+}
+
+void readBothSecrets(std::string secret)
+{
+ ans.readSecret(secret, _fail, "Secret mismatch in the (correct) answer file");
+ ouf.readSecret(secret, _pv, "Possible tampering with the output");
+}
+
+
+void InStream::quitByGraderResult(TResult result, std::string defaultMessage)
+{
+ std::string msg = "";
+ if (!eof())
+ msg = readLine();
+ if (msg.empty())
+ quits(result, defaultMessage);
+ quits(result, msg);
+}
+
+void InStream::readGraderResult()
+{
+ std::string result = readWord();
+ eoln();
+ if (result == _grader_OK)
+ return;
+ if (result == _grader_SV)
+ quitByGraderResult(_sv, "Security violation detected in grader");
+ if (result == _grader_PV)
+ quitByGraderResult(_pv, "Protocol violation detected in grader");
+ if (result == _grader_WA)
+ quitByGraderResult(_wa, "Wrong answer detected in grader");
+ if (result == _grader_FAIL)
+ quitByGraderResult(_fail, "Failure in grader");
+ quitf(_fail, "Unknown grader result");
+}
+
+void readBothGraderResults()
+{
+ ans.readGraderResult();
+ ouf.readGraderResult();
+}
+
+
+NORETURN void quit(TResult result)
+{
+ ouf.quit(result, "");
+}
+
+/// Used in validators: skips the rest of input, assuming it to be correct
+NORETURN void skip_ok()
+{
+ if (testlibMode != _validator)
+ quitf(_fail, "skip_ok() only works in validators");
+ testlibFinalizeGuard.quitCount++;
+ halt(0);
+}
+
+/// 1 -> 1st, 2 -> 2nd, 3 -> 3rd, 4 -> 4th, ...
+std::string englishTh(int x)
+{
+ char c[100];
+ sprintf(c, "%d%s", x, englishEnding(x).c_str());
+ return c;
+}
+
+/// Compares the tokens of two lines
+void compareTokens(int lineNo, std::string a, std::string b, char separator=' ')
+{
+ std::vector<std::string> toka = tokenize(a, separator);
+ std::vector<std::string> tokb = tokenize(b, separator);
+ if (toka == tokb)
+ return;
+ std::string dif = format("%s lines differ - ", englishTh(lineNo).c_str());
+ if (toka.size() != tokb.size())
+ quitf(_wa, "%sexpected: %d tokens, found %d tokens", dif.c_str(), int(toka.size()), int(tokb.size()));
+ for (int i = 0; i < int(toka.size()); i++)