diff --git a/Jenkinsfile b/Jenkinsfile
index 9af78a62..f6168e63 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -22,7 +22,7 @@ pipeline {
                     agent {
                         docker {
                             image 'fnlp:torch-1.11'
-                            args '-u root:root -v ${JENKINS_HOME}/html/docs:/docs -v ${JENKINS_HOME}/html/_ci:/ci --gpus all --shm-size 256M'
+                            args '-u root:root -v ${JENKINS_HOME}/html/docs:/docs -v ${JENKINS_HOME}/html/_ci:/ci --gpus all --shm-size 1G'
                         }
                     }
                     steps {
@@ -55,19 +55,19 @@ pipeline {
                         sh 'FASTNLP_BACKEND=paddle pytest ./tests/core/controllers/test_trainer_paddle.py --durations=0 --co'
                     }
                 }
-                stage('Test Jittor') {
-                    agent {
-                        docker {
-                            image 'fnlp:jittor'
-                            args '-u root:root -v ${JENKINS_HOME}/html/docs:/docs -v ${JENKINS_HOME}/html/_ci:/ci --gpus all'
-                        }
-                    }
-                    steps {
-                        // sh 'pip install fitlog'
-                        // sh 'pytest ./tests --html=test_results.html --self-contained-html'
-                        sh 'pytest ./tests --durations=0 -m jittor --co'
-                    }
-                }
+                // stage('Test Jittor') {
+                //     agent {
+                //         docker {
+                //             image 'fnlp:jittor'
+                //             args '-u root:root -v ${JENKINS_HOME}/html/docs:/docs -v ${JENKINS_HOME}/html/_ci:/ci --gpus all'
+                //         }
+                //     }
+                //     steps {
+                //         // sh 'pip install fitlog'
+                //         // sh 'pytest ./tests --html=test_results.html --self-contained-html'
+                //         sh 'pytest ./tests --durations=0 -m jittor --co'
+                //     }
+                // }
             }
         }
     }
diff --git a/tests/core/callbacks/test_load_best_model_callback_torch.py b/tests/core/callbacks/test_load_best_model_callback_torch.py
index 9f346003..07576599 100644
--- a/tests/core/callbacks/test_load_best_model_callback_torch.py
+++ b/tests/core/callbacks/test_load_best_model_callback_torch.py
@@ -73,7 +73,6 @@ def model_and_optimizers(request):
 
 
 @pytest.mark.torch
-@pytest.mark.temp
 @pytest.mark.parametrize("driver,device", [("torch", [0, 1]), ("torch", 1), ("torch", "cpu")]) # ("torch", "cpu"), ("torch", [0, 1]), ("torch", 1)
 @magic_argv_env_context
 def test_load_best_model_callback(
@@ -83,7 +82,6 @@ def test_load_best_model_callback(
 ):
     for save_folder in ['save_models', None]:
         for only_state_dict in [True, False]:
-            logger.error(f"{save_folder}, {only_state_dict}")
             callbacks = [LoadBestModelCallback(monitor='acc', only_state_dict=only_state_dict, save_folder=save_folder)]
             trainer = Trainer(
diff --git a/tests/core/dataloaders/test_utils.py b/tests/core/dataloaders/test_utils.py
index e5a7cc9e..7e97e97d 100644
--- a/tests/core/dataloaders/test_utils.py
+++ b/tests/core/dataloaders/test_utils.py
@@ -12,14 +12,12 @@ def test_no_args():
     def f(*args, a, b, **kwarg):
         c = 100
     call_kwargs = _match_param(f, demo)
-    with pytest.raises(RuntimeError):
-        f(a=1, b=2)
+    f(a=1, b=2)
 
     def f(a, *args, b, **kwarg):
         c = 100
     call_kwargs = _match_param(f, demo)
-    with pytest.raises(RuntimeError):
-        f(a=1, b=2)
+    f(a=1, b=2)
 
 
 @recover_logger
diff --git a/tests/core/dataloaders/torch_dataloader/test_fdl.py b/tests/core/dataloaders/torch_dataloader/test_fdl.py
index 6d20754a..8ed7441b 100644
--- a/tests/core/dataloaders/torch_dataloader/test_fdl.py
+++ b/tests/core/dataloaders/torch_dataloader/test_fdl.py
@@ -147,13 +147,14 @@ class TestFdl:
             assert 'Parameter:prefetch_factor' in out[0]
 
     @recover_logger
+    @pytest.mark.temp
    def test_version_111(self):
         if parse_version(torch.__version__) <= parse_version('1.7'):
             pytest.skip("Torch version smaller than 1.7")
         logger.set_stdout()
         ds = DataSet({"x": [[1, 2], [2, 3, 4], [4, 5, 6, 7]] * 10, "y": [1, 0, 1] * 10})
         with Capturing() as out:
-            dl = TorchDataLoader(ds, num_workers=2, prefetch_factor=3, shuffle=False)
+            dl = TorchDataLoader(ds, num_workers=0, prefetch_factor=2, generator=torch.Generator(), shuffle=False)
         for idx, batch in enumerate(dl):
             assert len(batch['x'])==1
             assert batch['x'][0].tolist() == ds[idx]['x']