diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2727a86 --- /dev/null +++ b/.gitignore @@ -0,0 +1,25 @@ +# emacs temp files +*~ + +# MAC +.DS_Store +Thumbs.db + +*.zip +*.exe +a.out +*.o +*.orig +*.bak +*.bat + +# libinteractive +*.cbp +*.layout +*.bat +libinteractive +libinteractive.jar +Makefile + +.vscode +results \ No newline at end of file diff --git a/.lint.config.json b/.lint.config.json new file mode 100644 index 0000000..af010c0 --- /dev/null +++ b/.lint.config.json @@ -0,0 +1,35 @@ +{ + "lint": { + "clang-format": { + "allowlist": [".*\\.cpp$"] + }, + "karel": { + "allowlist": [".*\\.(kp|kj)$"] + }, + "json": { + "allowlist": [".*\\.json$"] + }, + "markdown": { + "allowlist": [".*\\\\Palindromos\\\\.*\\.cpp"] + }, + "python": { + "allowlist": [".*\\.py$"], + "pylint_config": ".pylintrc" + }, + "problematic-terms": { + "terms": [ + { + "regexps": ["^[ \\t]*using\\s+namespace\\s+std\\s*;[ \\t]*$"], + "message": "El uso de `using namespace std;` es no-deseable.\n\nEl comité de C++ se reserva el derecho de usar cualquier nombre en `std::`, porque hacer `using namespace std;` es una causa común de errores al momento de cambiar de compilador. Por ejemplo, C++17 introdujo `std::count()`[1], así que todos los códigos de C++ que usan `using namespace std;` y declaran algo con el nombre `count` tienen un CE instantáneo.\n\n1: https://en.cppreference.com/w/cpp/algorithm/count" + }, + { + "regexps": [ + "^[ \\t]*#\\s*include\\s*<\\s*bits/stdc\\+\\+\\.h\\s*>[ \\t]*$" + ], + "message": "El uso de `bits/stdc++.h` es no-deseable.\n\nTodas las cabeceras de `bits/` son específicas al compilador y la arquitectura. Una causa común de errores de compilación locales es el uso de esta librería." 
+ } + ], + "allowlist": [".*\\\\Palindromos\\\\.*\\.cpp"] + } + } +} diff --git a/README.md b/README.md index b0600bb..386f3f2 100644 --- a/README.md +++ b/README.md @@ -1 +1,22 @@ -# public-courses +# Public Courses on Omegaup +This repository contains the course content for the public courses on Omegaup. + +## How to Contribute +You can contribute to the courses by adding new content, fixing typos, or improving existing materials. To do so, please follow these steps: +1. Fork the repository. +2. Create a new branch for your changes. +3. Make your changes and commit them with a clear message. +4. Push your changes to your forked repository. +5. Create a pull request to the main repository. + +## How to Sync Courses: +If you think that course content in this repository has run out of sync with omegaup.com, you can raise a pull request to sync the content. +To do this, follow these steps: +1. Fork the repository. +2. Create a new branch for your changes. +3. Edit the `sync-course.json` file to include the course name and the specific content that is out of sync. +4. Commit your changes with a clear message. +5. Push your changes to your forked repository. +6. Create a pull request to the main repository with target branch sync-course. +7. When your pull request is merged, a GitHub Action will run to sync the course content with omegaup.com and add a commit to your pull request. +8. Now the admins will merge this pull request to the main branch. 
\ No newline at end of file diff --git a/problems.json b/problems.json new file mode 100644 index 0000000..6e54b50 --- /dev/null +++ b/problems.json @@ -0,0 +1,3 @@ +{ + "problems": [] +} \ No newline at end of file diff --git a/utils/Pipfile b/utils/Pipfile new file mode 100644 index 0000000..331f1f9 --- /dev/null +++ b/utils/Pipfile @@ -0,0 +1,22 @@ +[[source]] + +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + + +[dev-packages] + +mypy = ">=0.782" +pycodestyle = ">=2.6.0" + + +[packages] + +libkarel = ">=1.0.2" +omegaup = ">=1.3.0" + + +[requires] + +python_version = "3.8" diff --git a/utils/Pipfile.lock b/utils/Pipfile.lock new file mode 100644 index 0000000..28b8919 --- /dev/null +++ b/utils/Pipfile.lock @@ -0,0 +1,160 @@ +{ + "_meta": { + "hash": { + "sha256": "c86a154ae724da2e2f59aa29e35340e6acbc7224154b3f7af6fe33cfe7b46e77" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.8" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.python.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "certifi": { + "hashes": [ + "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee", + "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8" + ], + "version": "==2021.5.30" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "version": "==4.0.0" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "version": "==2.10" + }, + "libkarel": { + "hashes": [ + "sha256:45ddc5c3807aeec754b43f12eea363c9c334b5e4844242e08a2ce028566d68df", + "sha256:fdc30745ee1b1cf2bb5d2a5318fefebf52a207ade7b3164ede46d0b9a1a8bfba" + ], + "index": "pypi", + "version": "==1.0.2" + }, + "omegaup": { + "hashes": [ + 
"sha256:13f1e0afc5a4b4cd0118e3ad81c6c6f8a4a4b52a6405e3d84d0febf7fb1ecf3d", + "sha256:438ab9a32e2f046a679e8aebec128d11ab9795f77786c74a3c60a5396f8e775f" + ], + "index": "pypi", + "version": "==1.3.0" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "version": "==2.25.1" + }, + "urllib3": { + "hashes": [ + "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c", + "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098" + ], + "index": "pypi", + "version": "==1.26.5" + } + }, + "develop": { + "mypy": { + "hashes": [ + "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e", + "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064", + "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c", + "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4", + "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97", + "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df", + "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8", + "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a", + "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56", + "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7", + "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6", + "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5", + "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a", + "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521", + "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564", + "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49", + 
"sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66", + "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a", + "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119", + "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506", + "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c", + "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb" + ], + "index": "pypi", + "version": "==0.812" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "pycodestyle": { + "hashes": [ + "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068", + "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef" + ], + "index": "pypi", + "version": "==2.7.0" + }, + "typed-ast": { + "hashes": [ + "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", + "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", + "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", + "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", + "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", + "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", + "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", + "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", + "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", + "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", + "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", + "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", + 
"sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", + "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", + "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", + "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", + "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", + "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", + "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", + "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", + "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", + "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", + "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", + "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", + "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", + "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", + "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", + "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", + "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", + "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" + ], + "version": "==1.4.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", + "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", + "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" + ], + "version": "==3.10.0.0" + } + } +} diff --git a/utils/container.py b/utils/container.py new file mode 100644 index 0000000..10b800e --- /dev/null +++ b/utils/container.py @@ -0,0 +1,173 @@ +import contextlib +import datetime +import logging +import subprocess +import os.path + +from types import 
TracebackType +from typing import AnyStr, Iterator, IO, Optional, Type, Sequence + +import problems + +_LANGUAGE_MAPPING = { + 'cpp': 'cpp17-gcc', +} + + +@contextlib.contextmanager +def _maybe_open(path: Optional[str], + mode: str) -> Iterator[Optional[IO[AnyStr]]]: + """A contextmanager that can open a file, or return None. + + This is useful to provide arguments to subprocess.call() and its friends. + """ + if path is None: + yield None + else: + with open(path, mode) as f: + yield f + + +def getImageName(ci: bool) -> str: + """Ensures the container image is present in the expected version.""" + if ci: + # Since this is running on GitHub, downloading the image from the + # GitHub container registry is significantly faster. + imageName = 'docker.pkg.github.com/omegaup/quark/omegaup-runner-ci' + else: + # This does not require authentication. + imageName = 'omegaup/runner-ci' + + taggedContainerName = f'{imageName}:v1.9.27' + if not subprocess.check_output( + ['docker', 'image', 'ls', '-q', taggedContainerName], + universal_newlines=True).strip(): + logging.info('Downloading Docker image %s...', taggedContainerName) + subprocess.check_call(['docker', 'pull', taggedContainerName]) + return taggedContainerName + + +class Compile: + """Use the omegaUp container to compile and run programs. 
+ + This is intended to be used as a context manager: + + with Compile(sourcePath='myprogram.cpp', ci=True) as c: + c.run(stdinPath='myinput.in', stdoutPath='myoutput.out') + """ + def __init__( + self, + sourcePath: str, + ci: bool, + ): + self.containerId = '' + self.containerSourceFilename = '' + self.sourcePath = sourcePath + self.ci = ci + + def __enter__(self) -> 'Compile': + extension = os.path.splitext(self.sourcePath)[1][1:] + self.language = _LANGUAGE_MAPPING.get(extension, extension) + self.containerSourceFilename = f'Main.{extension}' + self.containerId = subprocess.run([ + 'docker', + 'run', + '--rm', + '--detach', + '--entrypoint', + '/usr/bin/sleep', + '--volume', + (f'{os.path.abspath(self.sourcePath)}:' + f'/src/{self.containerSourceFilename}'), + getImageName(self.ci), + 'infinity', + ], + universal_newlines=True, + stdout=subprocess.PIPE, + check=True).stdout.strip() + + try: + self.run_command([ + '/var/lib/omegajail/bin/omegajail', + '--homedir', + '/src', + '--homedir-writable', + '--compile', + self.language, + '--compile-source', + self.containerSourceFilename, + '--compile-target', + 'Main', + ]) + except subprocess.CalledProcessError as cpe: + problems.error((f'Failed to compile {self.sourcePath}:\n' + + cpe.stderr.decode("utf-8")), + filename=self.sourcePath, + ci=self.ci) + # If the container errored out before returning, __exit__() won't + # be called, and the container will leak. Explicitly clean up + # before re-raising the exception to avoid that. 
+ self.__cleanup() + raise + + return self + + def __exit__(self, exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType]) -> None: + self.__cleanup() + + def run( + self, + stdinPath: str, + stdoutPath: str, + *, + timeout: datetime.timedelta = datetime.timedelta(seconds=5) + ) -> None: + """Run a single invocation of the compiled binary.""" + self.run_command(args=[ + '/var/lib/omegajail/bin/omegajail', + '--homedir', + '/src', + '--run', + self.language, + '--run-target', + 'Main', + ], + stdinPath=stdinPath, + stdoutPath=stdoutPath, + timeout=timeout) + + def run_command( + self, + args: Sequence[str], + *, + stdinPath: Optional[str] = None, + stdoutPath: Optional[str] = None, + timeout: datetime.timedelta = datetime.timedelta(seconds=10) + ) -> None: + """Run an arbitrary command in the container.""" + logging.debug('Invoking command in container: "%s"', ' '.join(args)) + + with _maybe_open(stdinPath, + 'rb') as stdin, _maybe_open(stdoutPath, + 'wb') as stdout: + subprocess.run( + ['docker', 'exec', '--interactive', self.containerId] + + list(args), + stdin=stdin, + stdout=stdout, + stderr=subprocess.PIPE, + timeout=timeout.total_seconds(), + check=True) + + def __cleanup(self) -> None: + # The output is the same container id, so avoid printing it because + # it's just noise. 
+ subprocess.check_call([ + 'docker', + 'container', + 'kill', + self.containerId, + ], + stdout=subprocess.DEVNULL) diff --git a/utils/generateresources.py b/utils/generateresources.py new file mode 100755 index 0000000..2b2a48b --- /dev/null +++ b/utils/generateresources.py @@ -0,0 +1,234 @@ +#!/usr/bin/python3 +import argparse +import concurrent.futures +import datetime +import json +import logging +import os +import re +import subprocess +import sys + +from typing import List, Optional + +import container +import problems + +_SUPPORTED_GENERATORS = frozenset(('png', 'testplan')) + + +def _getSolution(p: problems.Problem, *, rootDirectory: str, + ci: bool) -> Optional[str]: + """Gets the solution for the problem.""" + solutions = [ + f for f in os.listdir(os.path.join(rootDirectory, p.path, 'solutions')) + if f.startswith('solution.') + ] + + if not solutions: + return None + if len(solutions) != 1: + problems.fatal(f'Found more than one solution! {solutions}', + filename=os.path.join(p.path, 'settings.json'), + ci=ci) + + return os.path.join(rootDirectory, p.path, 'solutions', solutions[0]) + + +def _getInputs(p: problems.Problem, *, rootDirectory: str, + ci: bool) -> List[str]: + """Gets the list of .in files for the problem.""" + inFilenames = [ + f for subdirectory in ('cases', 'examples', 'statements') + for f in problems.enumerateFullPath( + os.path.join(rootDirectory, p.path, subdirectory)) + if f.endswith('.in') + ] + if not inFilenames: + problems.fatal(f'No test cases found for {p.title}!', + filename=os.path.join(p.path, 'settings.json'), + ci=ci) + return inFilenames + + +def _generateTestplan(p: problems.Problem, *, rootDirectory: str, force: bool, + ci: bool) -> bool: + """Generate testplan files for the provided problem.""" + logging.info('%-30s: Generating testplan for problem', p.title) + + if 'cases' not in p.config: + return True + + testplan = os.path.join(rootDirectory, p.path, 'testplan') + + logging.info('%-30s: Generating testplan from 
settings.json.', p.title) + + if os.path.isfile(testplan): + problems.fatal('testplan cannot exist when settings.json has cases!', + filename=os.path.relpath(testplan, rootDirectory), + ci=ci) + + with open(testplan, 'w') as tp: + for group in p.config['cases']: + for case in group['cases']: + tp.write("{} {}\n".format(case['name'], case['weight'])) + + return True + + +def _generateImages(p: problems.Problem, *, rootDirectory: str, force: bool, + ci: bool) -> bool: + """Generate .png files for the provided problem.""" + logging.info('%-30s: Generating images for problem', p.title) + + if p.config.get('misc', {}).get('languages') != 'karel': + logging.warning( + '%-30s: Not a karel problem! Skipping generating images.', p.title) + return True + + solutionPath = _getSolution(p, rootDirectory=rootDirectory, ci=ci) + if solutionPath is None: + logging.warning( + '%-30s: No solution found! Skipping generating images.', p.title) + return True + relativeSolutionPath = os.path.relpath(solutionPath, rootDirectory) + + inFilenames = _getInputs(p, rootDirectory=rootDirectory, ci=ci) + + anyProblemFailure = False + with container.Compile(sourcePath=solutionPath, ci=ci) as c: + logging.info('%-30s: Generating pngs for problem', p.title) + + for inFilename in inFilenames: + relativeInFilename = os.path.relpath(inFilename, rootDirectory) + outFilename = f'{os.path.splitext(inFilename)[0]}.out' + + logging.debug('%-30s: Generating .pngs for %s', p.title, + inFilename) + dimMatch = re.search(r'\.(\d*)x(\d*)\.in', inFilename) + if dimMatch: + dimOpts = [ + '--height', + dimMatch.group(1), '--width', + dimMatch.group(2) + ] + else: + dimOpts = [] + + try: + c.run_command([ + '/opt/nodejs/lib/node_modules/karel/cmd/kareljs', + 'draw', + '--output=-', + ] + dimOpts, + stdinPath=inFilename, + stdoutPath=f'{inFilename}.png', + timeout=datetime.timedelta(seconds=10)) + except subprocess.CalledProcessError as cpe: + anyProblemFailure = True + problems.error((f'failed generating ' + 
f'input .png for {relativeInFilename}:\n' + + cpe.stderr.decode("utf-8")), + filename=relativeInFilename, + ci=ci) + continue + + try: + c.run_command([ + '/opt/nodejs/lib/node_modules/karel/cmd/kareljs', + 'draw', + '--output=-', + '--run', + os.path.join('/src', c.containerSourceFilename), + ] + dimOpts, + stdinPath=inFilename, + stdoutPath=f'{outFilename}.png', + timeout=datetime.timedelta(seconds=10)) + except subprocess.CalledProcessError as cpe: + anyProblemFailure = True + problems.error((f'{relativeSolutionPath} failed generating ' + f'output .png with {relativeInFilename}:\n' + + cpe.stderr.decode("utf-8")), + filename=relativeSolutionPath, + ci=ci) + + if anyProblemFailure: + logging.warning('%-30s: Failed generating some .png files', p.title) + return False + + logging.info('%-30s: Success generating .png files', p.title) + return True + + +def _main() -> None: + parser = argparse.ArgumentParser('Generate resources') + parser.add_argument( + '--all', + action='store_true', + help='Consider all problems, instead of only those that have changed') + parser.add_argument('--ci', + action='store_true', + help='Signal that this is being run from the CI.') + parser.add_argument('--force', + action='store_true', + help='Force re-generating all resources') + parser.add_argument('--jobs', + '-j', + default=min(32, (os.cpu_count() or 2) + 4), + help='Number of threads to run concurrently') + parser.add_argument('--generate', + default=_SUPPORTED_GENERATORS, + type=lambda x: set(x.split(',')), + help=('Comma-separated list of artifacts to generate. ' + 'Should be a subset of {png,testplan}. 
' + 'Generates everything by default.')) + parser.add_argument('--verbose', + action='store_true', + help='Verbose logging') + parser.add_argument('problem_paths', + metavar='PROBLEM', + type=str, + nargs='*') + args = parser.parse_args() + + if args.generate - _SUPPORTED_GENERATORS: + logging.error('Provided generators not supported: %r', + args.generate - _SUPPORTED_GENERATORS) + sys.exit(1) + + logging.basicConfig(format='%(asctime)s: %(message)s', + level=logging.DEBUG if args.verbose else logging.INFO) + logging.getLogger('urllib3').setLevel(logging.CRITICAL) + + rootDirectory = problems.repositoryRoot() + + with concurrent.futures.ThreadPoolExecutor( + max_workers=args.jobs) as executor: + futures: List[concurrent.futures.Future[bool]] = [] + + for p in problems.problems(allProblems=args.all, + rootDirectory=rootDirectory, + problemPaths=args.problem_paths): + if 'testplan' in args.generate: + futures.append( + executor.submit(_generateTestplan, + p, + rootDirectory=rootDirectory, + force=args.force, + ci=args.ci)) + if 'png' in args.generate: + futures.append( + executor.submit(_generateImages, + p, + rootDirectory=rootDirectory, + force=args.force, + ci=args.ci)) + + if not all(future.result() + for future in concurrent.futures.as_completed(futures)): + logging.error('Some resources failed to generate') + sys.exit(1) + + +if __name__ == '__main__': + _main() diff --git a/utils/lint b/utils/lint new file mode 100755 index 0000000..d7e1bad --- /dev/null +++ b/utils/lint @@ -0,0 +1,58 @@ +#!/bin/bash + +set -e + +ROOT="$(git rev-parse --show-superproject-working-tree --show-toplevel | \ + head -n1)" +CONTAINER_IMAGE=omegaup/hook_tools:v1.0.9 + +if [[ $# != 0 ]]; then + # The caller has given us the explicit arguments. + ARGS="$@" +else + # Try to guess the set of changed files. Only specifying one commit so it + # diffs against the current working tree. 
+ REMOTE="origin" + if [ -d "${ROOT}/.git/refs/remotes/upstream" ]; then + REMOTE="upstream" + fi + REMOTE_HASH="$(git rev-parse "${REMOTE}/main")" + MERGE_BASE="$(git merge-base "${REMOTE_HASH}" HEAD)" + ARGS="fix ${MERGE_BASE}" +fi + +if [[ -t 0 ]]; then + # This is being run in an environment where stdin is connected to a TTY. + TTY_ARGS="--interactive --tty" +else + TTY_ARGS="" +fi + +if [[ -d /proc ]] && grep -q pids:/docker /proc/1/cgroup; then + echo "Running ./stuff/lint.sh inside a container is not supported." 1>&2 + echo "Please run this command outside the container" 1>&2 + exit 1 +fi +DOCKER_PATH="$(which docker)" +if [[ -z "${DOCKER_PATH}" ]]; then + echo "Docker binary not found." 1>&2 + echo "Please install docker or run this command outside the container." 1>&2 + exit 1 +fi + +if [[ "${ARGS}" == '--only-pull-image' ]]; then + exec "${DOCKER_PATH}" pull "${CONTAINER_IMAGE}" +fi + +"${DOCKER_PATH}" run $TTY_ARGS --rm \ + --user "$(id -u):$(id -g)" \ + --env "GIT_AUTHOR_NAME=$(git config user.name)" \ + --env "GIT_AUTHOR_EMAIL=$(git config user.email)" \ + --env "GIT_COMMITTER_NAME=$(git config user.name)" \ + --env "GIT_COMMITTER_EMAIL=$(git config user.email)" \ + --volume "${ROOT}:/src" \ + --env 'PYTHONIOENCODING=utf-8' \ + --env "MYPYPATH=${ROOT}/stuff" \ + "${CONTAINER_IMAGE}" --command-name="./stuff/lint.sh" $ARGS + +echo OK diff --git a/utils/problems.py b/utils/problems.py new file mode 100644 index 0000000..1794d14 --- /dev/null +++ b/utils/problems.py @@ -0,0 +1,189 @@ +import logging +import os +import sys +import subprocess +import json + +from typing import Any, List, Mapping, NamedTuple, NoReturn, Optional, Sequence + + +SETTINGS_JSON = 'settings.json' +GITIGNORE = '.gitignore' +OUT_PATTERN = '**/*.out' +PROBLEMS_JSON = 'problems.json' +DEFAULT_COMMIT_RANGE = 'origin/main...HEAD' + + +class Problem(NamedTuple): + """Represents a single problem.""" + path: str + title: str + config: Mapping[str, Any] + + @staticmethod + def 
load(problemPath: str, rootDirectory: str) -> 'Problem': + """Load a single problem from the path.""" + settings_path = os.path.join(rootDirectory, problemPath, SETTINGS_JSON) + try: + with open(settings_path) as f: + problemConfig = json.load(f) + except FileNotFoundError: + raise FileNotFoundError(f"{SETTINGS_JSON} not found at: {settings_path}") + except json.JSONDecodeError as e: + raise ValueError(f"Invalid JSON format in {SETTINGS_JSON} at: {settings_path}. Error: {e}") + + return Problem(path=problemPath, + title=problemConfig['title'], + config=problemConfig) + + def shouldGenerateOutputs(self, *, rootDirectory: str) -> bool: + """Returns whether the .out files should be generated for this problem. + + .out files are only generated if there is a .gitignore file that + contains the line `**/*.out` in the problem directory. + """ + gitignorePath = os.path.join(rootDirectory, self.path, GITIGNORE) + if not os.path.isfile(gitignorePath): + return False + with open(gitignorePath, 'r') as f: + for line in f: + if line.strip() == OUT_PATTERN: + return True + return False + + +def repositoryRoot() -> str: + """Returns the root directory of the project. + + If this is a submodule, it gets the root of the top-level working tree. + Raises RuntimeError if it fails to determine the root. 
+ """ + try: + output = subprocess.check_output([ + 'git', 'rev-parse', '--show-superproject-working-tree', + '--show-toplevel' + ], universal_newlines=True) + return output.strip().split()[0] + except subprocess.CalledProcessError: + raise RuntimeError("Failed to find Git repository root: not inside a Git repo.") + except FileNotFoundError: + raise RuntimeError("Git is not installed or not found in PATH.") + + +def enumerateFullPath(path: str) -> List[str]: + """Returns a list of full paths for the files in `path`.""" + if not os.path.exists(path): + return [] + return [os.path.join(path, f) for f in os.listdir(path)] + + +def ci_error(message: str, + *, + filename: Optional[str] = None, + line: Optional[int] = None, + col: Optional[int] = None) -> None: + """Show an error message, only on the CI.""" + location = [] + if filename is not None: + location.append(f'file={filename}') + if line is not None: + location.append(f'line={line}') + if col is not None: + location.append(f'col={col}') + print( + f'::error {",".join(location)}::' + + message.replace('%', '%25').replace('\r', '%0D').replace('\n', '%0A'), + file=sys.stderr, + flush=True) + + +def error(message: str, + *, + filename: Optional[str] = None, + line: Optional[int] = None, + col: Optional[int] = None, + ci: bool = False) -> None: + """Show an error message.""" + if ci: + ci_error(message, filename=filename, line=line, col=col) + else: + logging.error(message) + + +def fatal(message: str, + *, + filename: Optional[str] = None, + line: Optional[int] = None, + col: Optional[int] = None, + ci: bool = False) -> NoReturn: + """Show a fatal message and exit.""" + error(message, filename=filename, line=line, col=col, ci=ci) + sys.exit(1) + + +def problems(allProblems: bool = False, + problemPaths: Sequence[str] = (), + rootDirectory: Optional[str] = None) -> List[Problem]: + """Gets the list of problems that will be considered. 
+ + If `allProblems` is passed, all the problems that are declared in + `problems.json` will be returned. Otherwise, only those that have + differences with `upstream/main`. + """ + env = os.environ + if rootDirectory is None: + rootDirectory = repositoryRoot() + + logging.info('Loading problems...') + + if problemPaths: + # Generate the Problem objects from just the path. The title is ignored + # anyways, since it's read from the configuration file in the problem + # directory for anything important. + return [ + Problem.load(problemPath=problemPath, rootDirectory=rootDirectory) + for problemPath in problemPaths + ] + + with open(os.path.join(rootDirectory, PROBLEMS_JSON), 'r') as p: + config = json.load(p) + + configProblems: List[Problem] = [] + for problem in config['problems']: + if problem.get('disabled', False): + logging.warning('Problem %s disabled. Skipping.', problem['title']) + continue + configProblems.append( + Problem.load(problemPath=problem['path'], + rootDirectory=rootDirectory)) + + if allProblems: + logging.info('Loading everything as requested.') + return configProblems + + logging.info('Loading git diff.') + + if env.get('TRAVIS_COMMIT_RANGE'): + commitRange = env['TRAVIS_COMMIT_RANGE'] + elif env.get('CIRCLE_COMPARE_URL'): + commitRange = env['CIRCLE_COMPARE_URL'].split('/')[6] + elif env.get('GITHUB_BASE_COMMIT'): + commitRange = env['GITHUB_BASE_COMMIT'] + '...HEAD' + else: + commitRange = DEFAULT_COMMIT_RANGE + + changes = subprocess.check_output( + ['git', 'diff', '--name-only', '--diff-filter=AMDR', commitRange], + cwd=rootDirectory, + universal_newlines=True) + + problems: List[Problem] = [] + for problem in configProblems: + logging.info('Loading %s.', problem.title) + + if problem.path not in changes: + logging.info('No changes to %s. 
Skipping.', problem.title) + continue + problems.append(problem) + + return problems diff --git a/utils/runtests.py b/utils/runtests.py new file mode 100644 index 0000000..a9c293a --- /dev/null +++ b/utils/runtests.py @@ -0,0 +1,381 @@ +#!/usr/bin/python3 +import argparse +import collections +import concurrent.futures +import decimal +import json +import logging +import os +import os.path +import shlex +import shutil +import subprocess +import sys +import textwrap +import threading + +from typing import Any, DefaultDict, Dict, List, Mapping, Optional, Tuple + +import container +import problems + +TestResult = Tuple[problems.Problem, Mapping[str, Any]] + +_SANDBOX_DISABLED_WARNING = 'WARNING: Running with --disable-sandboxing' + + +def _availableProcessors() -> int: + """Returns the number of available processors.""" + try: + return len(os.sched_getaffinity(0)) + except AttributeError: + # os.sched_setaffinity() is not available in all OSs. Since we don't + # want to speculate how many cores there are, let's be paranoid and + # return 1. + return 1 + + +def _threadInitializer(threadAffinityMapping: Dict[int, int], + lock: threading.Lock) -> None: + """Set the thread affinity mapping for the current thread.""" + with lock: + threadAffinityMapping[threading.get_ident()] = len( + threadAffinityMapping) + + +def _testProblem(p: problems.Problem, *, threadAffinityMapping: Dict[int, int], + resultsDirectory: str, rootDirectory: str, + ci: bool) -> Optional[TestResult]: + """Run the CI on a single problem.""" + logging.info('[%2d] %-30s: Testing problem...', + threadAffinityMapping[threading.get_ident()], p.title) + + problemResultsDirectory = os.path.join(resultsDirectory, p.path) + problemOutputsDirectory = os.path.join(resultsDirectory, p.path, 'outputs') + os.makedirs(problemOutputsDirectory) + # The results are written with the container's UID, which does not + # necessarily match the caller's UID. 
To avoid that problem, we create + # the results directory with very lax permissions so that the container + # can write it. + os.chmod(problemResultsDirectory, 0o777) + os.chmod(problemOutputsDirectory, 0o777) + with open(os.path.join(problemResultsDirectory, 'ci.log'), 'w') as f: + pass + # Also make the ci log's permissions very lax. + os.chmod(os.path.join(problemResultsDirectory, 'ci.log'), 0o666) + + if p.shouldGenerateOutputs(rootDirectory=rootDirectory): + outputsArgs = [ + '-outputs', + os.path.relpath(problemOutputsDirectory, rootDirectory), + ] + else: + outputsArgs = [] + + if len(threadAffinityMapping) == 1: + # No need to involve taskset. Just run the container normally. + tasksetArgs = [ + container.getImageName(ci), + ] + else: + # Mark the entrypoint as only being able to run in a single core. + tasksetArgs = [ + '--entrypoint', + '/usr/bin/taskset', + container.getImageName(ci), + f'0x{2**threadAffinityMapping[threading.get_ident()]:x}', + '/usr/bin/omegaup-runner', + ] + + args = [ + 'docker', + 'run', + '--rm', + '--volume', + f'{rootDirectory}:/src', + ] + tasksetArgs + [ + '-oneshot=ci', + '-input', + p.path, + '-results', + os.path.relpath(problemResultsDirectory, rootDirectory), + ] + outputsArgs + + logging.debug('[%2d] %-30s: Running `%s`...', + threadAffinityMapping[threading.get_ident()], p.title, + shlex.join(args)) + processResult = subprocess.run(args, + universal_newlines=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=rootDirectory) + + if processResult.returncode != 0: + problems.error(f'Failed to run {p.title}:\n{processResult.stderr}', + filename=os.path.join(p.path, 'settings.json'), + ci=ci) + return None + + # The CI might have written a log, but the stderr contents have a few + # more things in it. 
+ with open(os.path.join(problemResultsDirectory, 'ci.log'), 'w') as f: + f.write(processResult.stderr) + + for root, _, filenames in os.walk(problemOutputsDirectory): + for filename in filenames: + shutil.copy( + os.path.join(root, filename), + os.path.join( + rootDirectory, p.path, + os.path.relpath(os.path.join(root, filename), + problemOutputsDirectory))) + + report = json.loads(processResult.stdout) + logging.info('[%2d] %-30s: %s', + threadAffinityMapping[threading.get_ident()], p.title, + report['state']) + return p, report + + +def _main() -> None: + rootDirectory = problems.repositoryRoot() + + parser = argparse.ArgumentParser('Run tests') + parser.add_argument('--ci', + action='store_true', + help='Signal that this is being run from the CI.') + parser.add_argument('--all', + action='store_true', + help=('Consider all problems, instead of ' + 'only those that have changed')) + parser.add_argument('--jobs', + '-j', + type=int, + default=_availableProcessors(), + help='Number of threads to run concurrently') + parser.add_argument('--verbose', + action='store_true', + help='Verbose logging') + parser.add_argument('--results-directory', + default=os.path.join(rootDirectory, 'results'), + help='Directory to store the results of the runs') + parser.add_argument('--overwrite-outs', + action='store_true', + help=('Overwrite all .out files if ' + 'a generator is present')) + parser.add_argument('--only-pull-image', + action='store_true', + help=('Don\'t run tests: ' + 'only download the Docker container')) + parser.add_argument('problem_paths', + metavar='PROBLEM', + type=str, + nargs='*') + args = parser.parse_args() + + logging.basicConfig(format='%(asctime)s: %(message)s', + level=logging.DEBUG if args.verbose else logging.INFO) + logging.getLogger('urllib3').setLevel(logging.CRITICAL) + + if args.only_pull_image: + container.getImageName(args.ci) + sys.exit(0) + + anyFailure = False + + if os.path.isdir(args.results_directory): + 
shutil.rmtree(args.results_directory) + os.makedirs(args.results_directory) + + # Run all the tests in parallel, but set the CPU affinity mask to a unique + # core for each thread in the pool. This mimics how the production + # container works (except for I/O). + futures: List[concurrent.futures.Future[Optional[TestResult]]] = [] + threadAffinityMapping: Dict[int, int] = {} + threadAffinityMappingLock = threading.Lock() + with concurrent.futures.ThreadPoolExecutor( + max_workers=min(os.cpu_count() or 1, args.jobs), + initializer=_threadInitializer, + initargs=(threadAffinityMapping, + threadAffinityMappingLock)) as executor: + for p in problems.problems(allProblems=args.all, + rootDirectory=rootDirectory, + problemPaths=args.problem_paths): + if (p.shouldGenerateOutputs(rootDirectory=rootDirectory) + and args.overwrite_outs): + logging.info('[ ] %-30s: Removing old .out files...', p.title) + for filename in os.listdir( + os.path.join(rootDirectory, p.path, 'cases')): + if not filename.endswith('.out'): + continue + os.unlink( + os.path.join(rootDirectory, p.path, 'cases', filename)) + + futures.append( + executor.submit(_testProblem, + p, + resultsDirectory=args.results_directory, + rootDirectory=rootDirectory, + threadAffinityMapping=threadAffinityMapping, + ci=args.ci)) + + # Once the results are gathered, display the results all at once. This + # limits the interleaving to make the output less confusing. 
+ for future in concurrent.futures.as_completed(futures): + futureResult = future.result() + if futureResult is None: + anyFailure = True + continue + + p, report = futureResult + + problemResultsDirectory = os.path.join(args.results_directory, p.path) + + if report['state'] != 'passed': + anyFailure = True + + if report['state'] == 'skipped': + errorString = report['error'] or ( + 'tests/tests.json, settings.json, outs, or testplan are ' + 'probably missing or invalid.') + problems.error(f'Skipped {p.title}: {errorString}', + filename=os.path.join(p.path, 'settings.json'), + ci=args.ci) + continue + + for testResult in report.get('tests', []): + if testResult['type'] == 'solutions': + testedFile = os.path.normpath( + os.path.join(p.path, 'tests', testResult['filename'])) + + expected = dict(testResult['solution']) + del (expected['filename']) + if not expected: + # If there are no constraints, by default expect the run to + # be accepted. + expected['verdict'] = 'AC' + logsDirectory = os.path.join(problemResultsDirectory, + str(testResult['index'])) + else: + if testResult['type'] == 'invalid-inputs': + testedFile = os.path.normpath( + os.path.join(p.path, + 'tests', + 'invalid-inputs', + testResult['filename'])) + expected = {'verdict': 'WA'} + else: + testedFile = os.path.normpath( + os.path.join(p.path, + 'cases', + testResult['filename'])) + expected = {'verdict': 'AC'} + logsDirectory = os.path.join(problemResultsDirectory, + str(testResult['index']), + 'validator') + + got = { + 'verdict': testResult.get('result', {}).get('verdict'), + 'score': testResult.get('result', {}).get('score'), + } + + logging.info( + f' {testResult["type"][:10]:10} | ' + f'{testResult["filename"][:40]:40} | ' + f'{testResult["state"]:8} | ' + f'expected={expected} got={got} | ' + f'logs at {os.path.relpath(logsDirectory, rootDirectory)}') + + failureMessages: DefaultDict[ + str, List[str]] = collections.defaultdict(list) + + normalizedScore = decimal.Decimal(got.get('score', 0)) + 
scaledScore = round(normalizedScore, 15) * 100 + + if testResult['state'] != 'passed': + # Build a table that reports groups and case verdicts. + groupReportTable = [ + f'{"group":20} | {"case":20} | {"score":7} | {"verdict"}', + f'{"-"*20}-+-{"-"*20}-+-{"-"*7}-+-{"-"*7}', + ] + if 'compile_error' in testResult['result']: + failureMessage = f"{testedFile}:\n" + textwrap.indent( + testResult['result']['compile_error'], ' ') + failureMessages[testedFile].append(failureMessage) + if testResult['result']['groups'] is not None: + for group in testResult['result']['groups']: + groupReportTable.append( + f'{group["group"][:20]:20} | {"":20} | ' + f'{group["score"]*100:6.2f}% |') + for c in group['cases']: + groupReportTable.append( + f'{"":20} | {c["name"][:20]:20} | ' + f'{c["score"]*100:6.2f}% | {c["verdict"]:3}') + groupReportTable.append( + f'{"-"*20}-+-{"-"*20}-+-{"-"*7}-+-{"-"*7}') + + failureMessages[testedFile].append( + '\n'.join(groupReportTable)) + + failedCases = { + c['name'] + for g in testResult['result']['groups'] + for c in g['cases'] if c['verdict'] != 'AC' + } + else: + failedCases = set() + + if os.path.isdir(logsDirectory): + for stderrFilename in sorted(os.listdir(logsDirectory)): + caseName = os.path.splitext(stderrFilename)[0] + + if not stderrFilename.endswith('.err'): + continue + if caseName not in failedCases: + continue + + if testResult['type'] == 'solutions': + associatedFile = testedFile + else: + associatedFile = os.path.join( + p.path, 'cases', f'{caseName}.in') + + with open(os.path.join(logsDirectory, stderrFilename), + 'r') as out: + contents = out.read().strip() + + if contents.startswith(_SANDBOX_DISABLED_WARNING): + contents = contents[ + len(_SANDBOX_DISABLED_WARNING):].strip() + + if not contents: + continue + + failureMessage = ( + f'{stderrFilename}:' + f'\n{textwrap.indent(contents, " ")}') + + failureMessages[associatedFile].append( + failureMessage) + else: + logging.warning('Logs directory %r not found.', + logsDirectory) + 
+ for (path, messages) in failureMessages.items(): + problems.error( + (f'Validation failed for problem: {p.title}\n' + f'Related file: {path}\n') + '\n'.join(messages), + filename=path, + ci=args.ci) + + logging.info(f'Results for {p.title}: {report["state"]}') + logging.info(f' Full logs and report in {problemResultsDirectory}') + + if anyFailure: + logging.info('') + logging.info('At least one problem failed.') + sys.exit(1) + + +if __name__ == '__main__': + _main() diff --git a/utils/sample-config.yaml b/utils/sample-config.yaml new file mode 100644 index 0000000..7373d4c --- /dev/null +++ b/utils/sample-config.yaml @@ -0,0 +1,51 @@ +alias: dummy-problem + +# optional: if false, the problem won't be deployed +# disabled: true + +# optional: a list of the admin users for this problem +# admins: [] + +# optional: a list of the admin groups for this problem +# admin-groups: [] + +# if true, ignores the .outs and uses the solution +# to generate them from the .ins +generate-output: true +solution: solution/solution.cpp +# timeout for .out generator per .in +timeout: 0.5 + +# optional: create the problem if it isn't already in omegaUp +# default true +# create: false + +# optional: upload the required files for interactive problems +# default false +# conflicts with generate-output: +# generate-output must be false if interactive is true +# interactive: true + +# all of these are required unless create is set to false +params: + title: Problem Title + source: Problem Source + + visibility: 1 + + # valid options: all, karel + languages: all + + time_limit: 1000 + memory_limit: 32768 + output_limit: 16384 + stack_limit: 10485760 + + overall_wall_time_limit: 60000 + + # valid options: token-caseless, token-numeric, token-numeric, custom, literal + validator: token-caseless + validator_time_limit: 1000 + extra_wall_time: 0 + + email_clarifications: 0 diff --git a/utils/upload.py b/utils/upload.py new file mode 100755 index 0000000..8d763b7 --- /dev/null +++ 
# --- utils/upload.py -----------------------------------------------------
#!/usr/bin/python3
"""Uploads (deploys) problems to omegaUp.

Builds a problem .zip from the repository layout and pushes it through the
omegaUp API, optionally creating the problem and syncing course assignments,
admins, admin groups, and tags.
"""
from __future__ import annotations

import argparse
import json
import logging
import os
import re
import subprocess
import tempfile
import zipfile

from typing import Any, Mapping, Optional, Set, Union

# NOTE(review): the project-local modules `omegaup.api` and `problems` are
# imported lazily inside _main() so that importing this module (e.g. to use
# parse_limit_value) does not require them to be installed. Functions that
# mention omegaup.api.Client only do so in annotations, which are deferred
# by `from __future__ import annotations`.

# === Constants ===
SETTINGS_JSON = 'settings.json'
TESTPLAN_FILE = 'testplan'
STATEMENTS_DIR = 'statements'
SOLUTIONS_DIR = 'solutions'
CASES_DIR = 'cases'
EXAMPLES_DIR = 'examples'
INTERACTIVE_DIR = 'interactive'
VALIDATOR_PREFIX = 'validator'

API_PROBLEM_DETAILS = '/api/problem/details/'
API_PROBLEM_CREATE = '/api/problem/create/'
API_PROBLEM_UPDATE = '/api/problem/update/'

LANGUAGES_ALL = ','.join((
    'c11-gcc',
    'c11-clang',
    'cpp11-gcc',
    'cpp11-clang',
    'cpp17-gcc',
    'cpp17-clang',
    'cs',
    'hs',
    'java',
    'lua',
    'pas',
    'py2',
    'py3',
    'rb',
))
LANGUAGES_KAREL = 'kj,kp'
LANGUAGES_NONE = ''


def createProblemZip(problemConfig: Mapping[str, Any], problemPath: str,
                     zipPath: str) -> None:
    """Creates a problem .zip on the provided path.

    Includes the testplan (if present), the custom validator (if the
    settings declare one), and the statements/solutions/cases trees, plus
    the optional examples/interactive trees.

    Raises:
        Exception: if a custom validator is declared but zero or more than
            one validator* file exists next to settings.json.
    """
    with zipfile.ZipFile(zipPath, 'w',
                         compression=zipfile.ZIP_DEFLATED) as archive:

        def _addFile(f: str) -> None:
            logging.debug('writing %s', f)
            # Paths inside the archive are relative to the problem root.
            archive.write(f, os.path.relpath(f, problemPath))

        def _recursiveAdd(directory: str) -> None:
            for (root, _,
                 filenames) in os.walk(os.path.join(problemPath, directory)):
                for f in filenames:
                    _addFile(os.path.join(root, f))

        testplan = os.path.join(problemPath, TESTPLAN_FILE)

        if os.path.isfile(testplan):
            _addFile(testplan)

        if problemConfig['Validator']['Name'] == 'custom':
            validators = [
                x for x in os.listdir(problemPath)
                if x.startswith(VALIDATOR_PREFIX)
            ]

            if not validators:
                raise Exception('Custom validator missing!')
            if len(validators) != 1:
                raise Exception('More than one validator found!')

            validator = os.path.join(problemPath, validators[0])

            _addFile(validator)

        for directory in (STATEMENTS_DIR, SOLUTIONS_DIR, CASES_DIR):
            _recursiveAdd(directory)

        for directory in (EXAMPLES_DIR, INTERACTIVE_DIR):
            if not os.path.isdir(os.path.join(problemPath, directory)):
                continue
            _recursiveAdd(directory)


def uploadProblemZip(client: omegaup.api.Client,
                     problemConfig: Mapping[str, Any], canCreate: bool,
                     zipPath: str, commitMessage: str) -> None:
    """Uploads a problem with the given .zip and configuration.

    Creates the problem when it does not exist (and canCreate is True),
    updates it otherwise, then best-effort syncs the course assignment,
    admins, admin groups, and tags declared in the 'misc' config section.

    Raises:
        Exception: if the problem does not exist and canCreate is False.
    """
    misc = problemConfig.get('misc', {})
    alias = problemConfig.get('alias', "")
    limits = problemConfig.get('Limits', {})
    validator = problemConfig.get('Validator', {})

    payload = {
        'message': commitMessage,
        'problem_alias': alias,
    }

    if misc:
        if misc.get('visibility') is not None:
            payload['visibility'] = misc['visibility']
        if misc.get('languages') is not None:
            payload['languages'] = misc['languages']
        if misc.get('email_clarifications') is not None:
            payload['email_clarifications'] = misc['email_clarifications']
        if misc.get('group_score_policy') is not None:
            # Fixed: the previous code had a trailing comma, which sent a
            # 1-tuple instead of a string to the API.
            payload['group_score_policy'] = misc['group_score_policy']

    if limits:
        time_limit = limits.get('TimeLimit')
        if time_limit is not None:
            payload['time_limit'] = parse_limit_value(time_limit)
        memory_limit = limits.get('MemoryLimit')
        if memory_limit is not None:
            # presumably settings store bytes and the API expects KiB —
            # TODO confirm against the omegaUp API docs.
            payload['memory_limit'] = parse_limit_value(memory_limit) // 1024
        input_limit = limits.get('InputLimit')
        if input_limit is not None:
            payload['input_limit'] = parse_limit_value(input_limit)
        output_limit = limits.get('OutputLimit')
        if output_limit is not None:
            payload['output_limit'] = parse_limit_value(output_limit)
        extra_wall_time = limits.get('ExtraWallTime')
        if extra_wall_time is not None:
            payload['extra_wall_time'] = parse_limit_value(extra_wall_time)
        overall_wall_time = limits.get('OverallWallTimeLimit')
        payload['overall_wall_time_limit'] = (
            parse_limit_value(overall_wall_time)
            if overall_wall_time is not None else 0)

    if validator:
        # settings.json stores the validator under Validator.Name. (The old
        # guard `validator.get('validator') is None` checked a key that is
        # never present, so it was always true; the assignment is therefore
        # unconditional.)
        payload['validator'] = validator.get('Name', 'default')

    exists = client.query(API_PROBLEM_DETAILS,
                          {'problem_alias': alias})['status'] == 'ok'

    if not exists:
        if not canCreate:
            raise Exception("Problem doesn't exist!")
        logging.info("Problem doesn't exist. Creating problem.")
        endpoint = API_PROBLEM_CREATE
    else:
        endpoint = API_PROBLEM_UPDATE

    # Expand the shorthand language sets into the API's comma-joined list.
    languages = payload.get('languages', '')
    if languages == 'all':
        payload['languages'] = LANGUAGES_ALL
    elif languages == 'karel':
        payload['languages'] = LANGUAGES_KAREL
    elif languages == 'none':
        payload['languages'] = LANGUAGES_NONE

    with open(zipPath, 'rb') as f:
        files = {'problem_contents': f}
        client.query(endpoint, payload, files)

    if exists:
        course_alias = misc.get('course_alias', '')
        assignment_alias = misc.get('assignment_alias', '')

        if course_alias and assignment_alias:
            # Best-effort: a failure to attach the problem to the course
            # assignment must not fail the whole upload.
            try:
                details = client.course.assignmentDetails(
                    course=course_alias, assignment=assignment_alias)

                # NOTE(review): `commit` is fetched and logged but never
                # passed to addProblem — confirm whether it was meant to be.
                versions = client.problem.versions(problem_alias=alias,
                                                   check_=False)
                commit = getattr(versions, 'published', '')
                if not commit:
                    logging.warning('No commit found in versions: %s',
                                    versions)
                    commit = ''

                client.course.addProblem(
                    course_alias=course_alias,
                    assignment_alias=assignment_alias,
                    problem_alias=alias,
                    points=getattr(details, 'points', 100.0),
                    check_=False)
                logging.info(
                    'Successfully added problem %s to course %s, '
                    'assignment %s', alias, course_alias, assignment_alias)
            except Exception as e:
                logging.warning('Could not add problem to assignment: %s', e)
        else:
            logging.info(
                'No course information found, problem %s uploaded '
                'successfully', alias)

    targetAdmins = misc.get('admins', [])
    targetAdminGroups = misc.get('admin-groups', [])
    allAdmins = None

    if targetAdmins or targetAdminGroups:
        allAdmins = client.problem.admins(problem_alias=alias)

    if targetAdmins and allAdmins:
        admins = {
            a['username'].lower()
            for a in allAdmins['admins'] if a['role'] == 'admin'
        }

        desiredAdmins = {admin.lower() for admin in targetAdmins}

        # Never add or remove the uploading user itself.
        clientAdmin: Set[str] = set()
        if client.username:
            clientAdmin.add(client.username.lower())
        adminsToRemove = admins - desiredAdmins - clientAdmin
        adminsToAdd = desiredAdmins - admins - clientAdmin

        for admin in adminsToAdd:
            logging.info('Adding problem admin: %s', admin)
            client.problem.addAdmin(problem_alias=alias, usernameOrEmail=admin)

        for admin in adminsToRemove:
            logging.info('Removing problem admin: %s', admin)
            client.problem.removeAdmin(problem_alias=alias,
                                       usernameOrEmail=admin)

    if targetAdminGroups and allAdmins:
        adminGroups = {
            a['alias'].lower()
            for a in allAdmins['group_admins'] if a['role'] == 'admin'
        }

        desiredGroups = {group.lower() for group in targetAdminGroups}

        groupsToRemove = adminGroups - desiredGroups
        groupsToAdd = desiredGroups - adminGroups

        for group in groupsToAdd:
            logging.info('Adding problem admin group: %s', group)
            client.problem.addGroupAdmin(problem_alias=alias, group=group)

        for group in groupsToRemove:
            logging.info('Removing problem admin group: %s', group)
            client.problem.removeGroupAdmin(problem_alias=alias, group=group)

    if 'tags' in misc:
        tags = {
            t['name'].lower()
            for t in client.problem.tags(problem_alias=alias)['tags']
        }

        desiredTags = {t.lower() for t in misc['tags']}

        tagsToRemove = tags - desiredTags
        tagsToAdd = desiredTags - tags

        for tag in tagsToRemove:
            if tag.startswith('problemRestrictedTag'):
                logging.info('Skipping restricted tag: %s', tag)
                continue
            client.problem.removeTag(problem_alias=alias, name=tag)

        for tag in tagsToAdd:
            logging.info('Adding problem tag: %s', tag)
            # NOTE(review): 'public' is never placed in payload, so this is
            # always False — confirm intent.
            client.problem.addTag(problem_alias=alias,
                                  name=tag,
                                  public=payload.get('public', False))


def parse_limit_value(value: Union[int, float, str, None]) -> Optional[int]:
    """Parses a limit value from settings.json into an integer.

    Numbers are truncated to int. Strings may carry an "ms" or "s" suffix
    (seconds are converted to milliseconds); a bare numeric string is
    assumed to already be in milliseconds. None passes through as None.

    Raises:
        ValueError: for a string that matches no recognized format.
        TypeError: for any other value type.
    """
    if value is None:
        return None
    if isinstance(value, (int, float)):
        return int(value)
    if isinstance(value, str):
        value = value.strip().lower()
        # Check "ms" before "s": every "ms" string also ends with "s".
        if value.endswith("ms"):
            return int(float(value[:-2]))
        if value.endswith("s"):
            return int(float(value[:-1]) * 1000)
        if re.match(r"^\d+(\.\d+)?$", value):
            # Assume milliseconds if no suffix
            return int(float(value))
        raise ValueError(f"Invalid limit value format: {value}")
    raise TypeError(f"Unsupported type for limit value: {type(value)}")


def uploadProblem(client: omegaup.api.Client, problemPath: str,
                  commitMessage: str, canCreate: bool) -> None:
    """Zips and uploads a single problem directory.

    Derives the course/assignment aliases from the path layout
    (<course>/<assignment>/<problem>) and records them in the 'misc'
    section before uploading.
    """
    with open(os.path.join(problemPath, SETTINGS_JSON), 'r') as f:
        problemConfig = json.load(f)

    logging.info('Uploading problem: %s', problemConfig['alias'])
    path_parts = problemPath.split(os.sep)
    course_alias = ''
    assignment_alias = ''

    if len(path_parts) >= 3:
        assignment_alias = path_parts[-2]
        course_alias = path_parts[-3]

    if 'misc' not in problemConfig:
        problemConfig['misc'] = {}

    problemConfig['misc']['course_alias'] = course_alias
    problemConfig['misc']['assignment_alias'] = assignment_alias

    with tempfile.NamedTemporaryFile() as tempFile:
        createProblemZip(problemConfig, problemPath, tempFile.name)

        uploadProblemZip(client,
                         problemConfig,
                         canCreate,
                         tempFile.name,
                         commitMessage=commitMessage)

    logging.info('Success uploading %s', problemConfig['alias'])


def _main() -> None:
    # Project-local imports are deferred so the module stays importable
    # without the omegaUp client library (see note at top of file).
    import omegaup.api
    import problems

    env = os.environ

    parser = argparse.ArgumentParser(
        description='Deploy a problem to omegaUp.')
    parser.add_argument('--ci',
                        action='store_true',
                        help='Signal that this is being run from the CI.')
    parser.add_argument(
        '--all',
        action='store_true',
        help='Consider all problems, instead of only those that have changed')
    parser.add_argument('--verbose',
                        action='store_true',
                        help='Verbose logging')
    parser.add_argument('--url',
                        default='https://omegaup.com',
                        help='URL of the omegaUp host.')
    parser.add_argument('--api-token',
                        type=str,
                        default=env.get('OMEGAUP_API_TOKEN'))
    parser.add_argument('-u',
                        '--username',
                        type=str,
                        default=env.get('OMEGAUPUSER'),
                        required=('OMEGAUPUSER' not in env
                                  and 'OMEGAUP_API_TOKEN' not in env))
    parser.add_argument('-p',
                        '--password',
                        type=str,
                        default=env.get('OMEGAUPPASS'),
                        required=('OMEGAUPPASS' not in env
                                  and 'OMEGAUP_API_TOKEN' not in env))
    parser.add_argument('--can-create',
                        action='store_true',
                        help=("Whether it's allowable to create the "
                              "problem if it does not exist."))
    parser.add_argument('problem_paths',
                        metavar='PROBLEM',
                        type=str,
                        nargs='*')
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)s: %(message)s',
                        level=logging.DEBUG if args.verbose else logging.INFO)
    logging.getLogger('urllib3').setLevel(logging.CRITICAL)

    client = omegaup.api.Client(username=args.username,
                                password=args.password,
                                api_token=args.api_token,
                                url=args.url)

    if env.get('GITHUB_ACTIONS'):
        commit = env['GITHUB_SHA']
    else:
        commit = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                         universal_newlines=True).strip()

    rootDirectory = problems.repositoryRoot()

    for problem in problems.problems(allProblems=args.all,
                                     rootDirectory=rootDirectory,
                                     problemPaths=args.problem_paths):
        uploadProblem(
            client,
            os.path.join(rootDirectory, problem.path),
            commitMessage=f'Deployed automatically from commit {commit}',
            canCreate=args.can_create)


if __name__ == '__main__':
    _main()