diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml
deleted file mode 100644
index a35b6f62d80..00000000000
--- a/.github/workflows/build_documentation.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-name: Documentation Build
-
-on: [pull_request, workflow_dispatch]
-
-env:
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it haven't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_EXAMPLES_CACHE: 3
-  RESET_DOC_BUILD_CACHE: 3
-  RESET_AUTOSUMMARY_CACHE: 3
-
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  docs-style:
-    name: "Check documentation style"
-    runs-on: ubuntu-latest
-    steps:
-      - name: "Check documentation style"
-        uses: ansys/actions/doc-style@v4
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          vale-config: "doc/.vale.ini"
-          vale-version: "2.29.6"
-
-  docs_build:
-    runs-on: ubuntu-20.04
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.8
-
-      - name: Install pyaedt
-        run: |
-          pip install .[doc]
-
-      - name: Verify pyaedt can be imported
-        run: python -c "import pyaedt"
-
-      - name: Retrieve PyAEDT version
-        id: version
-        run: |
-          echo "::set-output name=PYAEDT_VERSION::$(python -c "from pyaedt import __version__; print(__version__)")"
-          echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")"
-
-      # - name: Cache docs build directory
-      #   uses: actions/cache@v3
-      #   with:
-      #     path: doc/build
-      #     key: doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }}
-      #     restore-keys: |
-      #       doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}
-      # - name: Cache autosummary
-      #   uses: actions/cache@v3
-      #   with:
-      #     path: doc/source/**/_autosummary/*.rst
-      #     key: autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }}
-      #     restore-keys: |
-      #       autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}
-
-      - name: Install doc build requirements
-        run: |
-          sudo apt install graphviz
-
-      # run doc build, without creating the examples directory
-      # note that we have to add the examples file here since it won't
-      # be created as gallery is disabled on linux.
-      - name: Documentation Build
-        run: |
-          make -C doc clean
-          mkdir doc/source/examples -p
-          echo $'Examples\n========' > doc/source/examples/index.rst
-          make -C doc html SPHINXOPTS="-j auto -w build_errors.txt -N"
-
-      # Verify that sphinx generates no warnings
-      - name: Check for warnings
-        run: |
-          python doc/print_errors.py
-
-#      - name: Upload Documentation
-#        uses: actions/upload-artifact@v3
-#        with:
-#          name: Documentation
-#          path: doc/_build/html
-#          retention-days: 7
diff --git a/.github/workflows/cpython_linux.yml b/.github/workflows/cpython_linux.yml
deleted file mode 100644
index 4d10c19eff2..00000000000
--- a/.github/workflows/cpython_linux.yml
+++ /dev/null
@@ -1,91 +0,0 @@
-name: Linux_CPython_UnitTests
-
-env:
-  python.version: '3.10'
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it haven't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-
-
-on:
-  workflow_dispatch:
-    inputs:
-      logLevel:
-        description: 'Log level'
-        required: true
-        default: 'warning'
-      tags:
-        description: 'Linux CPython daily'
-  schedule: # UTC at 0100
-    - cron: '0 1 * * *'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  test:
-    runs-on: [Linux, pyaedt]
-    strategy:
-      matrix:
-        python-version: [ '3.10' ]
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-          architecture: 'x86'
-
-      - name: 'Install pyaedt'
-        run: |
-          python -m venv .pyaedt_test_env
-          export ANSYSEM_ROOT232=/apps/AnsysEM/v232/Linux64
-          export LD_LIBRARY_PATH=$ANSYSEM_ROOT232/common/mono/Linux64/lib64:$LD_LIBRARY_PATH
-          source .pyaedt_test_env/bin/activate
-          python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pip -U
-          python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-          pip install .[tests]
-          pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pytest-azurepipelines
-          python -c "import pyaedt; print('Imported pyaedt')"
-
-      - name: 'Unit testing'
-        uses: nick-fields/retry@v2
-        with:
-          max_attempts: 3
-          retry_on: error
-          timeout_minutes: 60
-          command: |
-            export ANS_NODEPCHECK=1
-            export ANSYSEM_ROOT232=/apps/AnsysEM/v232/Linux64
-            export LD_LIBRARY_PATH=$ANSYSEM_ROOT232/common/mono/Linux64/lib64:$LD_LIBRARY_PATH
-            source .pyaedt_test_env/bin/activate
-            pytest --tx 6*popen --durations=50 --dist loadfile -v _unittest
-
-      - name: 'Unit testing Solvers'
-        continue-on-error: true
-        uses: nick-fields/retry@v2
-        with:
-          max_attempts: 3
-          retry_on: error
-          timeout_minutes: 60
-          command: |
-            export ANS_NODEPCHECK=1
-            export ANSYSEM_ROOT232=/apps/AnsysEM/v232/Linux64
-            export LD_LIBRARY_PATH=$ANSYSEM_ROOT232/common/mono/Linux64/lib64:$LD_LIBRARY_PATH
-            source .pyaedt_test_env/bin/activate
-            pytest --tx 2*popen --durations=50 --dist loadfile -v _unittest_solvers
-
-      - name: Upload pytest test results
-        uses: actions/upload-artifact@v3
-        with:
-          name: pytest-results
-          path: junit/test-results.xml
-        if: ${{ always() }}
diff --git a/.github/workflows/ironpython.yml b/.github/workflows/ironpython.yml
deleted file mode 100644
index b169c44d1c3..00000000000
--- a/.github/workflows/ironpython.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-# This is a basic workflow to help you get started with Actions
-
-name: CI_Ironpython
-
-
-# Controls when the workflow will run
-on:
-  # Triggers the workflow on push or pull request events but only for the main branch
-  pull_request:
-    branches: [ main ]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
-    runs-on: [windows-latest, pyaedt]
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: 'Run Unit Tests in Ironpython'
-        timeout-minutes: 60
-        run: |
-          $processA = start-process 'cmd' -ArgumentList '/c .\_unittest_ironpython\run_unittests_batchmode.cmd' -PassThru
-          $processA.WaitForExit()
-          get-content .\_unittest_ironpython\pyaedt_unit_test_ironpython.log
-          $test_errors_failures = Select-String -Path .\_unittest_ironpython\pyaedt_unit_test_ironpython.log -Pattern "TextTestResult errors="
-          if ($test_errors_failures -ne $null)
-          {
-            exit 1
-          }
-          else
-          {
-            exit 0
-          }
diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml
deleted file mode 100644
index c801a34dd44..00000000000
--- a/.github/workflows/label.yml
+++ /dev/null
@@ -1,92 +0,0 @@
-name: Labeler
-on:
-  pull_request:
-  push:
-    branches: [ main ]
-    paths:
-      - '../labels.yml'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-
-  label-syncer:
-    name: Syncer
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: micnncim/action-label-syncer@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  labeler:
-    name: Set labels
-    needs: [label-syncer]
-    permissions:
-      contents: read
-      pull-requests: write
-    runs-on: ubuntu-latest
-    steps:
-
-    # Label based on modified files
-    - name: Label based on changed files
-      uses: actions/labeler@v4
-      with:
-        repo-token: "${{ secrets.GITHUB_TOKEN }}"
-        sync-labels: ''
-
-    # Label based on branch name
-    - uses: actions-ecosystem/action-add-labels@v1
-      if: |
-        startsWith(github.event.pull_request.head.ref, 'doc') ||
-        startsWith(github.event.pull_request.head.ref, 'docs')
-      with:
-        labels: documentation
-
-    - uses: actions-ecosystem/action-add-labels@v1
-      if: |
-        startsWith(github.event.pull_request.head.ref, 'maint') ||
-        startsWith(github.event.pull_request.head.ref, 'no-ci') ||
-        startsWith(github.event.pull_request.head.ref, 'ci')
-      with:
-        labels: maintenance
-
-    - uses: actions-ecosystem/action-add-labels@v1
-      if: startsWith(github.event.pull_request.head.ref, 'feat')
-      with:
-        labels: |
-          enhancement
-
-    - uses: actions-ecosystem/action-add-labels@v1
-      if: |
-        startsWith(github.event.pull_request.head.ref, 'fix') ||
-        startsWith(github.event.pull_request.head.ref, 'patch')
-      with:
-        labels: bug
-
-    - uses: actions-ecosystem/action-add-labels@v1
-      if: |
-        startsWith(github.event.pull_request.head.ref, 'test')
-      with:
-        labels: testing
-
-  commenter:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Suggest to add labels
-        uses: peter-evans/create-or-update-comment@v3
-        # Execute only when no labels have been applied to the pull request
-        if: toJSON(github.event.pull_request.labels.*.name) == '{}'
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          body: |
-            Please add one of the following labels to add this contribution to the Release Notes :point_down:
-            - [bug](https://github.com/ansys/pyaedt/pulls?q=label%3Abug+)
-            - [documentation](https://github.com/ansys/pyaedt/pulls?q=label%3Adocumentation+)
-            - [enhancement](https://github.com/ansys/pyaedt/pulls?q=label%3Aenhancement+)
-            - [good first issue](https://github.com/ansys/pyaedt/pulls?q=label%3Agood+first+issue)
-            - [maintenance](https://github.com/ansys/pyaedt/pulls?q=label%3Amaintenance+)
-            - [release](https://github.com/ansys/pyaedt/pulls?q=label%3Arelease+)
-            - [testing](https://github.com/ansys/pyaedt/pulls?q=label%Atesting+)
diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml
deleted file mode 100644
index 2aa10e4283c..00000000000
--- a/.github/workflows/nightly-docs.yml
+++ /dev/null
@@ -1,133 +0,0 @@
-name: Nightly Documentation Build
-
-on:
-  workflow_dispatch:
-  schedule: # UTC at 0400
-    - cron: '0 4 * * *'
-
-env:
-  DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com'
-  MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }}
-  MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  docs_build:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Setup Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.8
-
-      - name: Install pyaedt
-        run: |
-          pip install .
-
-      - name: Install doc build requirements
-        run: |
-          pip install .[doc]
-
-      - name: Full Documentation Build
-        run: |
-          make -C doc phtml
-
-      - name: Upload documentation HTML artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: documentation-html
-          path: doc/_build/html
-          retention-days: 7
-
-      - name: Upload HTML documentation artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: documentation-html-edb
-          path: doc/_build/html/EDBAPI
-          retention-days: 7
-
-  docs_upload:
-    needs: docs_build
-    runs-on: ubuntu-latest
-    steps:
-
-      - name: Deploy development documentation
-        uses: ansys/actions/doc-deploy-dev@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-
-  doc-index-dev:
-    name: "Deploy dev docs index"
-    runs-on: ubuntu-latest
-    needs: docs_upload
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/download-artifact@v3
-
-      - name: Display structure of downloaded files
-        run: ls -R
-
-      - name: "Deploy the dev documentation index for PyAEDT API"
-        uses: ansys/actions/doc-deploy-index@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev
-          index-name: pyaedt-vdev
-          host-url: ${{ vars.MEILISEARCH_HOST_URL }}
-          api-key: ${{ env.MEILISEARCH_API_KEY }}
-          pymeilisearchopts: --stop_urls \"EDBAPI\" # Add EDB API as another index to show it in dropdown button
-
-      - name: "Deploy the dev documentation index for EDB API"
-        uses: ansys/actions/doc-deploy-index@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev/EDBAPI/
-          index-name: pyedb-vdev
-          host-url: ${{ vars.MEILISEARCH_HOST_URL }}
-          api-key: ${{ env.MEILISEARCH_API_KEY }}
-          doc-artifact-name: documentation-html-edb # Add only EDB API as page in this index.
-          pymeilisearchopts: --port 8001 # serve in another port as 8000 is deafult
-
-  # docstring_testing:
-  #   runs-on: Windows
-
-  #   steps:
-  #     - uses: actions/checkout@v4
-
-  #     - name: Setup Python
-  #       uses: actions/setup-python@v2
-  #       with:
-  #         python-version: 3.8
-
-  #     - name: 'Create virtual env'
-  #       run: |
-  #         python -m venv testenv
-  #         testenv\Scripts\Activate.ps1
-  #         python -m pip install pip -U
-  #         python -m pip install wheel setuptools -U
-  #         python -c "import sys; print(sys.executable)"
-
-  #     - name: 'Install pyaedt'
-  #       run: |
-  #         testenv\Scripts\Activate.ps1
-  #         pip install . --use-feature=in-tree-build
-  #         cd _unittest
-  #         python -c "import pyaedt; print('Imported pyaedt')"
-
-  #     - name: Install testing requirements
-  #       run: |
-  #         testenv\Scripts\Activate.ps1
-  #         pip install -r requirements/requirements_test.txt
-  #         pip install pytest-azurepipelines
-
-  #     - name: Docstring testing
-  #       run: |
-  #         testenv\Scripts\Activate.ps1
-  #         pytest -v pyaedt/desktop.py pyaedt/icepak.py
-  #         pytest -v pyaedt/desktop.py pyaedt/hfss.py
diff --git a/.github/workflows/full_documentation.yml b/.github/workflows/test_wf.yml
similarity index 52%
rename from .github/workflows/full_documentation.yml
rename to .github/workflows/test_wf.yml
index 4f80360f4f5..e6e5cffe12d 100644
--- a/.github/workflows/full_documentation.yml
+++ b/.github/workflows/test_wf.yml
@@ -1,20 +1,20 @@
 # This is a basic workflow to help you get started with Actions
-name: FullDocumentation
+name: TESTFullDocumentation
 
 env:
   python.version: '3.10'
   python.venv: 'testvenv'
-  DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com'
-  MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }}
-  MEILISEARCH_HOST_URL: https://backend.search.pyansys.com
-  MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }}
+
 
 # Controls when the workflow will run
 on:
   # Triggers the workflow on push or pull request events but only for the main branch
   push:
-    tags:
-      - v*
+    branches:
+      - main
+  pull_request:
+    branches: [ main ]
+
   workflow_dispatch:
     inputs:
       logLevel:
@@ -26,7 +26,6 @@ on:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
 
 # A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
@@ -78,6 +77,13 @@ jobs:
 #        testenv\Scripts\Activate.ps1
 #        .\doc\make.bat pdf
 
+      # TODO: update upload action once path exclusion is implemented (if ever)
+      # see https://github.com/actions/upload-pages-artifact/issues/33
+      - name: Remove files to not upload
+        run: |
+          echo "Removing files not wanted in the uploaded artifact"
+          Remove-Item doc/_build/html/.doctrees -Recurse -Force
+
       - name: Upload HTML documentation artifact
         uses: actions/upload-artifact@v3
         with:
@@ -107,66 +113,3 @@ jobs:
 #          files: |
 #            doc/_build/pdf
 
-  doc-deploy-stable:
-    name: Deploy stable documentation
-    runs-on: ubuntu-latest
-    needs: full_documentation
-    if: github.event_name == 'push' && contains(github.ref, 'refs/tags')
-    steps:
-      - name: Deploy the stable documentation
-        uses: ansys/actions/doc-deploy-stable@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          python-version: ${{ matrix.python-version }}
-
-
-  doc-index-stable:
-    name: "Deploy stable docs index"
-    if: github.event_name == 'push' && contains(github.ref, 'refs/tags')
-    runs-on: ubuntu-latest
-    needs: doc-deploy-stable
-
-    steps:
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.MAIN_PYTHON_VERSION }}
-
-      - uses: actions/checkout@v4
-
-      - uses: actions/download-artifact@v3
-
-      - name: Display structure of downloaded files
-        run: ls -R
-
-      - name: Install the package requirements
-        run: pip install -e .
-
-      - name: Get the version to PyMeilisearch
-        run: |
-          VERSION=$(python -c "from pyaedt import __version__; print('.'.join(__version__.split('.')[:2]))")
-          VERSION_MEILI=$(python -c "from pyaedt import __version__; print('-'.join(__version__.split('.')[:2]))")
-          echo "Calculated VERSION: $VERSION"
-          echo "Calculated VERSION_MEILI: $VERSION_MEILI"
-          echo "VERSION=$VERSION" >> $GITHUB_ENV
-          echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV
-
-      - name: "Deploy the stable documentation index for PyAEDT API"
-        uses: ansys/actions/doc-deploy-index@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }}
-          index-name: pyaedt-v${{ env.VERSION_MEILI }}
-          host-url: ${{ vars.MEILISEARCH_HOST_URL }}
-          api-key: ${{ env.MEILISEARCH_API_KEY }}
-          pymeilisearchopts: --stop_urls \"EDBAPI\" # Add EDB API as another index.
-
-      - name: "Deploy the stable documentation index for EDB API"
-        uses: ansys/actions/doc-deploy-index@v4
-        with:
-          cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }}/EDBAPI/
-          index-name: pyedb-v${{ env.VERSION_MEILI }}
-          host-url: ${{ vars.MEILISEARCH_HOST_URL }}
-          api-key: ${{ env.MEILISEARCH_API_KEY }}
-          doc-artifact-name: documentation-html-edb # Add only EDB API as page in this index.
-          pymeilisearchopts: --port 8001 #serve in another port
diff --git a/.github/workflows/unit_test_prerelease.yml b/.github/workflows/unit_test_prerelease.yml
deleted file mode 100644
index 1c79c395a48..00000000000
--- a/.github/workflows/unit_test_prerelease.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-name: CI_PreRelease
-
-env:
-  python.version: '3.8'
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it haven't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-# Controls when the workflow will run
-on:
-  workflow_dispatch:
-    inputs:
-      logLevel:
-        description: 'Log level'
-        required: true
-        default: 'warning'
-      tags:
-        description: 'Linux CPython daily'
-  schedule: # UTC at 0300
-    - cron: '0 3 * * *'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
-    runs-on: [pre_release]
-    strategy:
-      matrix:
-        python-version: ['3.8']
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: 'Create virtual env'
-        run: |
-          python -m venv testenv
-          testenv\Scripts\Activate.ps1
-          python -m pip install pip -U
-          python -m pip install wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-
-      - name: 'Install pyaedt'
-        run: |
-          testenv\Scripts\Activate.ps1
-          pip install .[tests]
-          pip install pytest-azurepipelines
-          Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force
-          Copy-Item -Path "C:\actions-runner\local_config.json" -Destination "_unittest" -Force
-          mkdir tmp
-          cd tmp
-          python -c "import pyaedt; print('Imported pyaedt')"
-
-      # - name: "Check licences of packages"
-      #   uses: pyansys/pydpf-actions/check-licenses@v2.0
-
-      - name: 'Unit testing'
-        timeout-minutes: 60
-        run: |
-          testenv\Scripts\Activate.ps1
-          Set-Item -Path env:PYTHONMALLOC -Value "malloc"
-          pytest --tx 6*popen --durations=50 --dist loadfile -v --cov=pyaedt --cov-report=xml --junitxml=junit/test-results.xml --cov-report=html _unittest
-
-      - uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        if: matrix.python-version == '3.8'
-        name: 'Upload coverage to Codecov'
-
-      - name: Upload pytest test results
-        uses: actions/upload-artifact@v3
-        with:
-          name: pytest-results
-          path: junit/test-results.xml
-        # Use always() to always run this step to publish test results when there are test failures
-        if: ${{ always() }}
-
diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml
deleted file mode 100644
index 8242a8d8749..00000000000
--- a/.github/workflows/unit_tests.yml
+++ /dev/null
@@ -1,172 +0,0 @@
-name: CI
-
-env:
-  python.version: '3.10'
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it hasn't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-# Controls when the workflow will run
-on:
-  # Triggers the workflow on push or pull request events but only for the main branch
-  push:
-    tags:
-      - 'v*'
-    branches:
-      - main
-  pull_request:
-    branches: [ main ]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build_solvers:
-    # The type of runner that the job will run on
-    runs-on: [ windows-latest, pyaedt ]
-    strategy:
-      matrix:
-        python-version: [ '3.10' ]
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: 'Create virtual env'
-        run: |
-          Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue
-          python -m venv testenv_s
-          testenv_s\Scripts\Activate.ps1
-          python -m pip install pip -U
-          python -m pip install wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-
-      - name: 'Install pyaedt'
-        run: |
-          testenv_s\Scripts\Activate.ps1
-          pip install .
-          pip install .[tests]
-          pip install pytest-azurepipelines
-          Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force
-          mkdir tmp
-          cd tmp
-          python -c "import pyaedt; print('Imported pyaedt')"
-
-      # - name: "Check licences of packages"
-      #   uses: pyansys/pydpf-actions/check-licenses@v2.0
-
-      - name: 'Unit testing'
-        uses: nick-fields/retry@v2
-        with:
-          max_attempts: 3
-          retry_on: error
-          timeout_minutes: 40
-          command: |
-            testenv_s\Scripts\Activate.ps1
-            Set-Item -Path env:PYTHONMALLOC -Value "malloc"
-            pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers
-
-      - uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        with:
-          name: 'Upload coverage to Codecov'
-
-      - name: Upload pytest test results
-        uses: actions/upload-artifact@v3
-        with:
-          name: pytest-results
-          path: junit/test-results.xml
-        # Use always() to always run this step to publish test results when there are test failures
-        if: ${{ always() }}
-
-
-  build:
-    # The type of runner that the job will run on
-    runs-on: [windows-latest, pyaedt]
-    strategy:
-      matrix:
-        python-version: ['3.10']
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: 'Create virtual env'
-        run: |
-          Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue
-          python -m venv testenv
-          testenv\Scripts\Activate.ps1
-          python -m pip install pip -U
-          python -m pip install wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-
-      - name: 'Install pyaedt'
-        run: |
-          testenv\Scripts\Activate.ps1
-          pip install .
-          pip install .[tests]
-          pip install pytest-azurepipelines
-          Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force
-          mkdir tmp
-          cd tmp
-          python -c "import pyaedt; print('Imported pyaedt')"
-
-      # - name: "Check licences of packages"
-      #   uses: pyansys/pydpf-actions/check-licenses@v2.0
-
-      - name: 'Unit testing'
-        uses: nick-fields/retry@v2
-        with:
-          max_attempts: 3
-          retry_on: error
-          timeout_minutes: 50
-          command: |
-            testenv\Scripts\Activate.ps1
-            Set-Item -Path env:PYTHONMALLOC -Value "malloc"
-            pytest -n 6 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest
-
-      - uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-        with:
-          name: 'Upload coverage to Codecov'
-
-      - name: Upload pytest test results
-        uses: actions/upload-artifact@v3
-        with:
-          name: pytest-results
-          path: junit/test-results.xml
-        # Use always() to always run this step to publish test results when there are test failures
-        if: ${{ always() }}
-
-      - name: 'Build and validate source distribution'
-        run: |
-          testenv\Scripts\Activate.ps1
-          python -m pip install build twine
-          python -m build
-          python -m twine check dist/*
-
-      - name: "Builds and uploads to PyPI"
-        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
-        run: |
-          testenv\Scripts\Activate.ps1
-          python setup.py sdist
-          python -m pip install twine
-          python -m twine upload --skip-existing dist/*
-        env:
-          TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/unit_tests_solvers.bkp b/.github/workflows/unit_tests_solvers.bkp
deleted file mode 100644
index 19080841594..00000000000
--- a/.github/workflows/unit_tests_solvers.bkp
+++ /dev/null
@@ -1,103 +0,0 @@
-name: CI_Solvers
-
-env:
-  python.version: '3.10'
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it hasn't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-# Controls when the workflow will run
-on:
-  # Triggers the workflow on push or pull request events but only for the main branch
-  push:
-    tags:
-      - 'v*'
-    branches:
-      - main
-  pull_request:
-    branches: [ main ]
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
-    runs-on: [windows-latest, pyaedt]
-    strategy:
-      matrix:
-        python-version: ['3.10']
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: 'Create virtual env'
-        run: |
-          Remove-Item D:\Temp\* -Recurse -Force
-          python -m venv testenv_s
-          testenv_s\Scripts\Activate.ps1
-          python -m pip install pip -U
-          python -m pip install wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-
-      - name: 'Install pyaedt'
-        run: |
-          testenv_s\Scripts\Activate.ps1
-          pip install .
-          pip install .[tests]
-          pip install pytest-azurepipelines
-          Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force
-          mkdir tmp
-          cd tmp
-          python -c "import pyaedt; print('Imported pyaedt')"
-
-      # - name: "Check licences of packages"
-      #   uses: pyansys/pydpf-actions/check-licenses@v2.0
-
-      - name: 'Unit testing'
-        timeout-minutes: 40
-        run: |
-          testenv_s\Scripts\Activate.ps1
-          Set-Item -Path env:PYTHONMALLOC -Value "malloc"
-          pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers
-
-      - uses: codecov/codecov-action@v3
-        if: matrix.python-version == '3.10'
-        name: 'Upload coverage to Codecov'
-
-      - name: Upload pytest test results
-        uses: actions/upload-artifact@v3
-        with:
-          name: pytest-results
-          path: junit/test-results.xml
-        # Use always() to always run this step to publish test results when there are test failures
-        if: ${{ always() }}
-
-      - name: 'Build and validate source distribution'
-        run: |
-          testenv_s\Scripts\Activate.ps1
-          python -m pip install build twine
-          python -m build
-          python -m twine check dist/*
-
-      - name: "Builds and uploads to PyPI"
-        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
-        run: |
-          testenv_s\Scripts\Activate.ps1
-          python setup.py sdist
-          python -m pip install twine
-          python -m twine upload --skip-existing dist/*
-        env:
-          TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/wheelhouse.yml b/.github/workflows/wheelhouse.yml
deleted file mode 100644
index 2f8a07db1ff..00000000000
--- a/.github/workflows/wheelhouse.yml
+++ /dev/null
@@ -1,90 +0,0 @@
-# This is a basic workflow to help you get started with Actions
-
-name: WheelHouse
-
-env:
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache,
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number, if you go down (or repeat a previous value)
-  # you might end up reusing a previous cache if it haven't been deleted already.
-  # It applies 7 days retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-# Controls when the workflow will run
-on:
-  # Triggers the workflow on push or pull request events but only for the main branch
-  push:
-    tags:
-      - 'v*'
-      - v*
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
-    runs-on: [windows-latest]
-    strategy:
-      matrix:
-        python-version: [ 3.7, 3.8, 3.9, '3.10']
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: 'Create virtual env'
-        run: |
-          python -m venv testenv
-          testenv\Scripts\Activate.ps1
-          python -m pip install pip -U
-          python -m pip install wheel setuptools -U
-          python -c "import sys; print(sys.executable)"
-          pip install .[all]
-          pip install jupyterlab
-
-
-      - name: Retrieve PyAEDT version
-        run: |
-          testenv\Scripts\Activate.ps1
-          echo "::set-output name=PYAEDT_VERSION::$(python -c "from pyaedt import __version__; print(__version__)")"
-          echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")"
-        id: version
-
-      - name: Generate wheelhouse
-        run: |
-          testenv\Scripts\Activate.ps1
-          $packages=$(pip freeze)
-          # Iterate over the packages and generate wheels
-          foreach ($package in $packages) {
-              echo "Generating wheel for $package"
-              pip wheel "$package" -w wheelhouse
-          }
-
-      - name: Zip wheelhouse
-        uses: vimtor/action-zip@v1
-        with:
-          files: wheelhouse
-          dest: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip
-
-      - name: Upload Wheelhouse
-        uses: actions/upload-artifact@v3
-        with:
-          name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}
-          path: '*.zip'
-          retention-days: 7
-
-      - name: Release
-        uses: softprops/action-gh-release@v1
-        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
-        with:
-          generate_release_notes: true
-          files: |
-            ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip
diff --git a/.github/workflows/wheelhouse_linux.yml b/.github/workflows/wheelhouse_linux.yml
deleted file mode 100644
index af755cdb9b8..00000000000
--- a/.github/workflows/wheelhouse_linux.yml
+++ /dev/null
@@ -1,89 +0,0 @@
-# This is a basic workflow to help you get started with Actions
-
-name: WheelHouse Linux
-
-env:
-  python.venv: 'testvenv'
-  # Following env vars when changed will "reset" the mentioned cache
-  # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-...
-  # You should go up in number. If you go down (or repeat a previous value),
-  # you might end up reusing a previous cache if it hasn't been deleted already.
-  # It applies a 7-day retention policy by default.
-  RESET_PIP_CACHE: 0
-  PACKAGE_NAME: PyAEDT
-# Controls when the workflow will run
-on:
-  # Triggers the workflow on push or pull request events but only for the main branch
-  push:
-    tags:
-      - 'v*'
-      - v*
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
-jobs:
-  # This workflow contains a single job called "build"
-  build:
-    # The type of runner that the job will run on
-    runs-on: ubuntu-20.04
-    strategy:
-      matrix:
-        python-version: [ 3.7, 3.8, 3.9, '3.10']
-    # Steps represent a sequence of tasks that will be executed as part of the job
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Install pyaedt
-        run: |
-          pip install .[all]
-          pip install jupyterlab
-
-      - name: Verify pyaedt can be imported
-        run: python -c "import pyaedt"
-
-      - name: Retrieve PyAEDT version
-        run: |
-          echo "::set-output name=PYAEDT_VERSION::$(python -c "from pyaedt import __version__; print(__version__)")"
-          echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")"
-        id: version
-
-      - name: Generate wheelhouse
-        run: |
-          pip install wheel setuptools -U
-          pip install --upgrade pip
-          pip wheel . -w wheelhouse
-          export wheellist=$(pip freeze)
-          for file in $wheellist; do
-            if [[ $file != *"@"* ]] && [[ $file != *"pyaedt"* ]]; then
-              pip wheel $file -w wheelhouse
-            fi
-          done
-        continue-on-error: true
-
-      - name: Zip wheelhouse
-        uses: vimtor/action-zip@v1
-        with:
-          files: wheelhouse
-          dest: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip
-
-      - name: Upload Wheelhouse
-        uses: actions/upload-artifact@v3
-        with:
-          name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}
-          path: '*.zip'
-          retention-days: 7
-
-      - name: Release
-        uses: softprops/action-gh-release@v1
-        if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
-        with:
-          generate_release_notes: true
-          files: |
-            ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip
\ No newline at end of file