mirror of
https://github.com/volcengine/verl.git
synced 2025-10-20 13:43:50 +08:00
> [!WARNING]
> We are [migrating to `ruff` as the linter and formatter and
`pre-commit` as the managing
tool](https://github.com/volcengine/verl/pull/1010).
>
> If your branch is based on a previous commit using `yapf` and
`pylint`, simply merging might trigger overwhelming linting errors,
while **you are only expected to resolve ones in the files related to
your PR**.
>
> To resolve this issue, please try the following workaround to only
include the files you **really changed** in the PR:
>
> 1. In your branch, fix linting and format with `ruff`: `ruff check
--fix && ruff format`
> 2. Squash into a single commit in a new branch: `git reset --soft
$(git merge-base main HEAD) && git add -A && git commit -m "feat: ..."`
> 3. Merge with the latest main: `git merge origin/main`
> 4. Force push to your branch: `git push --force`
We add the reminder above to the documentation to tell contributors how
to avoid overwhelming linting errors.
### Motivation
According to the discussion in #896, this PR migrates from yapf & pylint to
ruff based on pre-commit, which allows unified version control and
automatic hook on committing.
### Summary
The `pre-commit` hook and CI
- checks staged / committed files in commits / PR's
- checks all files each month (This should fail before we fix all the
files by the ruff standard)
### Explanation for the Failing CI Workflow `pre-commit`
For now, we only apply `ruff format` and `ruff check --fix` **without
resolving all the errors**, since there are too many errors to resolve,
which causes the CI workflow `pre-commit` to fail.
We leave resolving the remaining errors to future commits.
Specifically, the `pre-commit` hook and CI will require every commit to
fix its related files with `ruff`, which will fix all the files
incrementally.
### Reviewing Suggestion
The commit
3d93f51ba8
is huge since we apply `ruff` to all the files. To review the main
changes, please check the commits before and after it.
72 lines
2.3 KiB
Python
72 lines
2.3 KiB
Python
# Copyright 2024 Bytedance Ltd. and/or its affiliates
|
|
#
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
# you may not use this file except in compliance with the License.
|
|
# You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
|
|
import base64
|
|
import json
|
|
import multiprocessing
|
|
import pickle
|
|
import zlib
|
|
|
|
# Reuse `run_test` for convenience
|
|
from verl.utils.reward_score.prime_code.testing_util import run_test
|
|
|
|
|
|
def _temp_run(in_outs, generation, debug, result, metadata_list, timeout):
    """Subprocess worker: run `run_test` and report back via shared lists.

    Executed in a child process (see `check_correctness`); `result` and
    `metadata_list` are multiprocessing.Manager lists, so appends here are
    visible to the parent even after this process exits.
    """
    res, metadata = run_test(in_outs, test=generation, debug=debug, timeout=timeout)
    result.append(res)
    metadata_list.append(metadata)
|
|
|
|
|
|
def check_correctness(in_outs, generation, timeout, debug=True):
    """Check correctness of code generation with a global timeout.

    The global timeout is to catch some extreme/rare cases not handled by the
    timeouts inside `run_test`.

    Args:
        in_outs: dict of test cases; must contain an "inputs" list (presumably
            parallel to an "outputs" list — consumed by `run_test`).
        generation: candidate source code to evaluate.
        timeout: per-test-case timeout in seconds, forwarded to `run_test`.
        debug: if True, print a note when the global timeout fires.

    Returns:
        A tuple `(results, metadata)`: per-test-case result list (all entries
        -1 if the global timeout fired) and the metadata dict from `run_test`
        (an empty dict if the worker never reported back).
    """
    manager = multiprocessing.Manager()
    result = manager.list()
    metadata_list = manager.list()
    p = multiprocessing.Process(
        target=_temp_run,
        args=(in_outs, generation, debug, result, metadata_list, timeout),
    )
    p.start()
    # Global budget: per-case timeout (+1s slack) for every input, plus 5s
    # of startup slack for the subprocess itself.
    p.join(timeout=(timeout + 1) * len(in_outs["inputs"]) + 5)
    if p.is_alive():
        p.kill()
    if not result:
        # Global timeout: consider that all tests failed.
        result = [[-1] * len(in_outs["inputs"])]
        if debug:
            print("global timeout")
    # Bug fix: on a global timeout the worker never appended any metadata, so
    # `metadata_list[0]` raised IndexError here; fall back to an empty dict.
    metadata = metadata_list[0] if metadata_list else {}
    return result[0], metadata
|
|
|
|
|
|
def compute_score(completion, test_cases):
    """Score a model completion against its test cases.

    Args:
        completion: model output; the solution is taken from the last
            ```python fenced block (or the trailing text if no fence).
        test_cases: either a JSON string of test cases, or a base64-encoded,
            zlib-compressed pickle containing that JSON string.

    Returns:
        True iff every test case passes; False otherwise, including when the
        sandboxed run raises any exception (treated as a failed solution).
    """
    # Extract the candidate solution from the completion text.
    solution = completion.split("```python")[-1].split("```")[0]

    # Extract test cases: plain JSON first, compressed-pickle fallback.
    try:
        in_outs = json.loads(test_cases)
    except Exception:  # narrowed from a bare `except:` (kept broad to preserve the fallback)
        # SECURITY NOTE: pickle.loads can execute arbitrary code; this path
        # must only ever see trusted, dataset-provided payloads.
        in_outs = json.loads(pickle.loads(zlib.decompress(base64.b64decode(test_cases.encode("utf-8")))))

    success = False
    try:
        res, metadata = check_correctness(in_outs=in_outs, generation=solution, timeout=6, debug=False)
        # `run_test` may return ints (e.g. -1/-2) for errors, so only an exact
        # True counts as a pass — do not relax this to truthiness.
        success = all(x == True for x in res)  # noqa: E712
    except Exception:
        # Any internal failure counts as an unsuccessful solution.
        pass

    return success
|