diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..74736a9
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,38 @@
+name: Build
+
+on:
+  - push
+  # branches: [main, development]
+  - pull_request
+
+jobs:
+  test:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        # os: [ubuntu-latest, macOS-latest]
+        os: [macOS-latest]
+        #python-version: ['3.6']
+        python-version: ['3.7', '3.8']
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v2
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install pytest
+        pip install -e .
+    - name: coverall
+      run: |
+        pip install coveralls
+        pip install pytest-cover
+    - name: run pytest
+      run: |
+        py.test tods/tests/ --cov=tods
+        coveralls
+    - name: codecov
+      uses: codecov/codecov-action@v1
diff --git a/.github/workflows/unit_test.yml b/.github/workflows/unit_test.yml
new file mode 100644
index 0000000..74736a9
--- /dev/null
+++ b/.github/workflows/unit_test.yml
@@ -0,0 +1,38 @@
+name: Build
+
+on:
+  - push
+  # branches: [main, development]
+  - pull_request
+
+jobs:
+  test:
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        # os: [ubuntu-latest, macOS-latest]
+        os: [macOS-latest]
+        #python-version: ['3.6']
+        python-version: ['3.7', '3.8']
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v2
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install pytest
+        pip install -e .
+    - name: coverall
+      run: |
+        pip install coveralls
+        pip install pytest-cover
+    - name: run pytest
+      run: |
+        py.test tods/tests/ --cov=tods
+        coveralls
+    - name: codecov
+      uses: codecov/codecov-action@v1
diff --git a/README.md b/README.md
index 7698923..7fd1f17 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,9 @@
 Logo
 
-[![Build Status](https://travis-ci.org/datamllab/tods.svg?branch=master)](https://travis-ci.org/datamllab/tods)
+[![Actions Status](https://github.com/datamllab/tods/workflows/Build/badge.svg)](https://github.com/datamllab/tods/actions)
+
+[![codecov](https://codecov.io/gh/datamllab/tods/branch/dev/graph/badge.svg?token=M90ZCVTRBF)](https://codecov.io/gh/datamllab/tods)
 
 [中文文档](README.zh-CN.md)
diff --git a/setup.py b/setup.py
index 516a10e..8d29791 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ setup(
         ]
     },
     install_requires=[
-        'tamu_d3m==2021.11.24',
+        'tamu_d3m==2022.01.21',
         'tamu_axolotl',
         'numpy<=1.21.2',
         'combo',
diff --git a/tods/searcher/brute_force_search.py b/tods/searcher/brute_force_search.py
index d3c53ce..1e0ddef 100644
--- a/tods/searcher/brute_force_search.py
+++ b/tods/searcher/brute_force_search.py
@@ -192,19 +192,19 @@ def _generate_pipline(combinations): # pragma: no cover
     # The first three steps are fixed
     # Step 0: dataset_to_dataframe
     step_0 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.dataset_to_dataframe'))
-    step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='inputs.0')
+    step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='inputs.0')
     step_0.add_output('produce')
     pipeline_description.add_step(step_0)
 
     # Step 1: column_parser
     step_1 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.column_parser'))
-    step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')
+    step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.0.produce')
     step_1.add_output('produce')
     pipeline_description.add_step(step_1)
 
     # Step 2: extract_columns_by_semantic_types(attributes)
     step_2 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))
-    step_2.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')
+    step_2.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.1.produce')
     step_2.add_output('produce')
     step_2.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,
                               data=['https://metadata.datadrivendiscovery.org/types/Attribute'])
@@ -212,7 +212,7 @@ def _generate_pipline(combinations): # pragma: no cover
 
     # Step 3: extract_columns_by_semantic_types(targets)
     step_3 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))
-    step_3.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')
+    step_3.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.0.produce')
     step_3.add_output('produce')
     step_3.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,
                               data=['https://metadata.datadrivendiscovery.org/types/TrueTarget'])
@@ -222,30 +222,30 @@ def _generate_pipline(combinations): # pragma: no cover
     targets = 'steps.3.produce'
 
     tods_step_4 = PrimitiveStep(primitive=index.get_primitive(combination[0]))
-    tods_step_4.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference=attributes)
+    tods_step_4.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data=attributes)
     tods_step_4.add_output('produce')
     pipeline_description.add_step(tods_step_4)
 
     tods_step_5 = PrimitiveStep(primitive=index.get_primitive(combination[1]))
-    tods_step_5.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.4.produce')
+    tods_step_5.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.4.produce')
     tods_step_5.add_output('produce')
     pipeline_description.add_step(tods_step_5)
 
     tods_step_6= PrimitiveStep(primitive=index.get_primitive(combination[2]))
-    tods_step_6.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.5.produce')
+    tods_step_6.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.5.produce')
     tods_step_6.add_output('produce')
     tods_step_6.add_hyperparameter(name='contamination', argument_type=ArgumentType.VALUE, data=combination[3])
     pipeline_description.add_step(tods_step_6)
 
     #tods_step_7 = PrimitiveStep(primitive=index.get_primitive(combination[3]))
-    #tods_step_7.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.6.produce')
+    #tods_step_7.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.6.produce')
     #tods_step_7.add_output('produce')
     #pipeline_description.add_step(tods_step_7)
 
     # Finalize the pipeline
     final_step = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.construct_predictions'))
-    final_step.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.6.produce')
-    final_step.add_argument(name='reference', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')
+    final_step.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.6.produce')
+    final_step.add_argument(name='reference', argument_type=ArgumentType.CONTAINER, data='steps.1.produce')
     final_step.add_output('produce')
     pipeline_description.add_step(final_step)
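
Note on the recurring change in tods/searcher/brute_force_search.py: every `add_argument(..., data_reference=...)` call becomes `add_argument(..., data=...)`, presumably to match the keyword accepted by `PrimitiveStep.add_argument` under the newly pinned `tamu_d3m==2022.01.21` in setup.py. Below is a minimal stand-alone sketch of the updated call style; it is not part of this diff, and the import paths are assumed to follow those used in the tods example pipelines.

    from d3m import index
    from d3m.metadata.base import ArgumentType
    from d3m.metadata.pipeline import Pipeline, PrimitiveStep

    # Create an empty pipeline with a single input slot.
    pipeline_description = Pipeline()
    pipeline_description.add_input(name='inputs')

    # Step 0: dataset_to_dataframe, wired to the pipeline input using the new `data=` keyword
    # (previously `data_reference=`).
    step_0 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.dataset_to_dataframe'))
    step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='inputs.0')
    step_0.add_output('produce')
    pipeline_description.add_step(step_0)

    # Step 1: column_parser, consuming step 0's output via the same keyword.
    step_1 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.column_parser'))
    step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data='steps.0.produce')
    step_1.add_output('produce')
    pipeline_description.add_step(step_1)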