Merge branch 'main' into CURA-10896_improve_add_printer_wizard

Saumya Jain 1 year ago
commit 4a8a8d35f6

+ 146 - 151
.github/workflows/conan-package-create.yml

@@ -1,158 +1,153 @@
 name: Create and Upload Conan package
 
 on:
-    workflow_call:
-        inputs:
-            project_name:
-                required: true
-                type: string
-
-            recipe_id_full:
-                required: true
-                type: string
-
-            build_id:
-                required: true
-                type: number
-
-            build_info:
-                required: false
-                default: true
-                type: boolean
-
-            recipe_id_latest:
-                required: false
-                type: string
-
-            runs_on:
-                required: true
-                type: string
-
-            python_version:
-                required: true
-                type: string
-
-            conan_config_branch:
-                required: false
-                type: string
-
-            conan_logging_level:
-                required: false
-                type: string
-
-            conan_clean_local_cache:
-                required: false
-                type: boolean
-                default: false
-
-            conan_upload_community:
-                required: false
-                default: true
-                type: boolean
+  workflow_call:
+    inputs:
+      project_name:
+        required: true
+        type: string
+
+      recipe_id_full:
+        required: true
+        type: string
+
+      build_id:
+        required: true
+        type: number
+
+      build_info:
+        required: false
+        default: true
+        type: boolean
+
+      recipe_id_latest:
+        required: false
+        type: string
+
+      runs_on:
+        required: true
+        type: string
+
+      python_version:
+        required: true
+        type: string
+
+      conan_config_branch:
+        required: false
+        type: string
+
+      conan_logging_level:
+        required: false
+        type: string
+
+      conan_clean_local_cache:
+        required: false
+        type: boolean
+        default: false
+
+      conan_upload_community:
+        required: false
+        default: true
+        type: boolean
 
 env:
-    CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
-    CONAN_LOG_RUN_TO_OUTPUT: 1
-    CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
-    CONAN_NON_INTERACTIVE: 1
+  CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
+  CONAN_LOG_RUN_TO_OUTPUT: 1
+  CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
+  CONAN_NON_INTERACTIVE: 1
 
 jobs:
-    conan-package-create:
-        runs-on: ${{ inputs.runs_on }}
-
-        steps:
-            -   name: Checkout
-                uses: actions/checkout@v3
-
-            -   name: Setup Python and pip
-                uses: actions/setup-python@v4
-                with:
-                    python-version: ${{ inputs.python_version }}
-                    cache: 'pip'
-                    cache-dependency-path: .github/workflows/requirements-conan-package.txt
-
-            -   name: Install Python requirements for runner
-                run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
-                #  Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
-
-            -   name: Use Conan download cache (Bash)
-                if: ${{ runner.os != 'Windows' }}
-                run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
-
-            -   name: Use Conan download cache (Powershell)
-                if: ${{ runner.os == 'Windows' }}
-                run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
-
-            -   name: Cache Conan local repository packages (Bash)
-                uses: actions/cache@v3
-                if: ${{ runner.os != 'Windows' }}
-                with:
-                    path: |
-                        $HOME/.conan/data
-                        $HOME/.conan/conan_download_cache
-                    key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
-
-            -   name: Cache Conan local repository packages (Powershell)
-                uses: actions/cache@v3
-                if: ${{ runner.os == 'Windows' }}
-                with:
-                    path: |
-                        C:\Users\runneradmin\.conan\data
-                        C:\.conan
-                        C:\Users\runneradmin\.conan\conan_download_cache
-                    key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
-
-            -   name: Install MacOS system requirements
-                if:  ${{ runner.os == 'Macos' }}
-                run: brew install autoconf automake ninja
-
-            # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
-            #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
-            -   name: Install Linux system requirements
-                if: ${{ runner.os == 'Linux' }}
-                run: |
-                    sudo rm /var/cache/debconf/config.dat
-                    sudo dpkg --configure -a
-                    sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
-                    sudo apt update
-                    sudo apt upgrade
-                    sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
-
-            -   name: Install GCC-12 on ubuntu-22.04
-                if: ${{ startsWith(inputs.runs_on, 'ubuntu-22.04') }}
-                run: |
-                    sudo apt install g++-12 gcc-12 -y
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
-
-            -   name: Use GCC-10 on ubuntu-20.04
-                if: ${{ startsWith(inputs.runs_on, 'ubuntu-20.04') }}
-                run: |
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10
-
-            -   name: Create the default Conan profile
-                run:  conan profile new default --detect
-
-            -   name: Get Conan configuration from branch
-                if: ${{ inputs.conan_config_branch != '' }}
-                run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
-
-            -   name: Get Conan configuration
-                if: ${{ inputs.conan_config_branch == '' }}
-                run: conan config install https://github.com/Ultimaker/conan-config.git
-
-            -   name: Add Cura private Artifactory remote
-                run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
-
-            -   name: Create the Packages
-                run: conan install ${{ inputs.recipe_id_full }} --build=missing --update
-
-            -   name: Upload the Package(s)
-                if: ${{ always() && inputs.conan_upload_community }}
-                run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
-
-            -   name: Upload the Package(s) to the private Artifactory
-                if: ${{ always() && ! inputs.conan_upload_community }}
-                run: conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
+  conan-package-create:
+    runs-on: ${{ inputs.runs_on }}
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Setup Python and pip
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ inputs.python_version }}
+          cache: 'pip'
+          cache-dependency-path: .github/workflows/requirements-conan-package.txt
+
+      - name: Install Python requirements for runner
+        run: pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
+        #  Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
+
+      - name: Use Conan download cache (Bash)
+        if: ${{ runner.os != 'Windows' }}
+        run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
+
+      - name: Use Conan download cache (Powershell)
+        if: ${{ runner.os == 'Windows' }}
+        run: conan config set storage.download_cache="C:\Users\runneradmin\.conan\conan_download_cache"
+
+      - name: Cache Conan local repository packages (Bash)
+        uses: actions/cache@v3
+        if: ${{ runner.os != 'Windows' }}
+        with:
+          path: |
+            $HOME/.conan/data
+            $HOME/.conan/conan_download_cache
+          key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
+
+      - name: Cache Conan local repository packages (Powershell)
+        uses: actions/cache@v3
+        if: ${{ runner.os == 'Windows' }}
+        with:
+          path: |
+            C:\Users\runneradmin\.conan\data
+            C:\.conan
+            C:\Users\runneradmin\.conan\conan_download_cache
+          key: conan-${{ inputs.runs_on }}-${{ runner.arch }}-create-cache
+
+      - name: Install MacOS system requirements
+        if: ${{ runner.os == 'Macos' }}
+        run: brew install autoconf automake ninja
+
+      # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
+      #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
+      - name: Install Linux system requirements
+        if: ${{ runner.os == 'Linux' }}
+        run: |
+          sudo rm /var/cache/debconf/config.dat
+          sudo dpkg --configure -a
+          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
+          sudo apt update
+          sudo apt upgrade
+          sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison -y
+
+      - name: Install GCC-13 on Ubuntu
+        if: ${{ startsWith(inputs.runs_on, 'ubuntu') }}
+        run: |
+          sudo apt install g++-13 gcc-13 -y
+          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
+          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
+
+      - name: Create the default Conan profile
+        run: conan profile new default --detect
+
+      - name: Get Conan configuration from branch
+        if: ${{ inputs.conan_config_branch != '' }}
+        run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
+
+      - name: Get Conan configuration
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
+
+      - name: Add Cura private Artifactory remote
+        run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
+
+      - name: Create the Packages
+        run: conan install ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
+
+      - name: Upload the Package(s)
+        if: ${{ always() && inputs.conan_upload_community }}
+        run: conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
+
+      - name: Upload the Package(s) to the private Artifactory
+        if: ${{ always() && ! inputs.conan_upload_community }}
+        run: conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
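
For reference, a minimal sketch of how another workflow could call this reusable workflow. Only the input names and the workflow path come from the file above; the job name, recipe reference, runner label and Python version are placeholders:

jobs:
  call-conan-package-create:
    uses: ultimaker/cura/.github/workflows/conan-package-create.yml@main
    with:
      project_name: cura
      recipe_id_full: cura/5.5.0@ultimaker/testing  # placeholder reference
      build_id: 1
      runs_on: ubuntu-22.04
      python_version: '3.11.x'
    secrets: inherit

secrets: inherit is needed so the CONAN_USER and CONAN_PASS secrets referenced in the env block above are available to the called workflow.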

+ 19 - 14
.github/workflows/conan-package.yml

@@ -49,15 +49,15 @@ on:
       - '[1-9].[0-9][0-9].[0-9]*'
 
 env:
-    CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
-    CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
-    CONAN_LOG_RUN_TO_OUTPUT: 1
-    CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
-    CONAN_NON_INTERACTIVE: 1
-
-permissions: {}
+  CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
+  CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
+  CONAN_LOG_RUN_TO_OUTPUT: 1
+  CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
+  CONAN_NON_INTERACTIVE: 1
+
+permissions: { }
 jobs:
   conan-recipe-version:
     permissions:
@@ -103,18 +103,23 @@ jobs:
           sudo apt update
           sudo apt upgrade
           sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
-          sudo apt install g++-12 gcc-12 -y
-          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
-          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
+
+      - name: Install GCC-13
+        run: |
+          sudo apt install g++-13 gcc-13 -y
+          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
+          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
 
       - name: Create the default Conan profile
         run: conan profile new default --detect --force
 
       - name: Get Conan configuration
-        run: conan config install https://github.com/Ultimaker/conan-config.git
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
 
       - name: Create the Packages
-        run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True
+        run: conan create . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o ${{ needs.conan-recipe-version.outputs.project_name }}:devtools=True -c tools.build:skip_test=True
 
       - name: Create the latest alias
         if: always()
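
The -c tools.build:skip_test=True argument added to the conan create call above sets a Conan conf value per invocation. As a sketch of an alternative (not part of this commit, and assuming the default profile path used by Conan 1.x), the same conf could be written once into the default profile instead of being repeated on every command line:

      - name: Skip tests for all Conan builds (sketch)
        run: |
          # Appends a [conf] section to the profile created by
          # "conan profile new default --detect --force"; this has the same
          # effect as passing -c tools.build:skip_test=True on each
          # conan create / conan install call.
          printf '\n[conf]\ntools.build:skip_test=True\n' >> ~/.conan/profiles/default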

+ 93 - 92
.github/workflows/conan-recipe-export.yml

@@ -1,106 +1,107 @@
 name: Export Conan Recipe to server
 
 on:
-    workflow_call:
-        inputs:
-            recipe_id_full:
-                required: true
-                type: string
+  workflow_call:
+    inputs:
+      recipe_id_full:
+        required: true
+        type: string
 
-            recipe_id_latest:
-                required: false
-                type: string
+      recipe_id_latest:
+        required: false
+        type: string
 
-            runs_on:
-                required: true
-                type: string
+      runs_on:
+        required: true
+        type: string
 
-            python_version:
-                required: true
-                type: string
+      python_version:
+        required: true
+        type: string
 
-            conan_config_branch:
-                required: false
-                type: string
+      conan_config_branch:
+        required: false
+        type: string
 
-            conan_logging_level:
-                required: false
-                type: string
+      conan_logging_level:
+        required: false
+        type: string
 
-            conan_export_binaries:
-                required: false
-                type: boolean
+      conan_export_binaries:
+        required: false
+        type: boolean
 
-            conan_upload_community:
-                required: false
-                default: true
-                type: boolean
+      conan_upload_community:
+        required: false
+        default: true
+        type: boolean
 
 env:
-    CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
-    CONAN_LOG_RUN_TO_OUTPUT: 1
-    CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
-    CONAN_NON_INTERACTIVE: 1
+  CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
+  CONAN_LOG_RUN_TO_OUTPUT: 1
+  CONAN_LOGGING_LEVEL: ${{ inputs.conan_logging_level }}
+  CONAN_NON_INTERACTIVE: 1
 
 jobs:
-    package-export:
-        runs-on: ${{ inputs.runs_on }}
-
-        steps:
-            -   name: Checkout project
-                uses: actions/checkout@v3
-
-            -   name: Setup Python and pip
-                uses: actions/setup-python@v4
-                with:
-                    python-version: ${{ inputs.python_version }}
-                    cache: 'pip'
-                    cache-dependency-path: .github/workflows/requirements-conan-package.txt
-
-            -   name: Install Python requirements and Create default Conan profile
-                run: |
-                    pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
-                    conan profile new default --detect
-                #  Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
-
-            -   name: Cache Conan local repository packages
-                uses: actions/cache@v3
-                with:
-                    path: $HOME/.conan/data
-                    key: ${{ runner.os }}-conan-export-cache
-
-            -   name: Get Conan configuration from branch
-                if: ${{ inputs.conan_config_branch != '' }}
-                run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
-
-            -   name: Get Conan configuration
-                if: ${{ inputs.conan_config_branch == '' }}
-                run: conan config install https://github.com/Ultimaker/conan-config.git
-
-            -   name: Add Cura private Artifactory remote
-                run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
-
-            -   name: Export the Package (binaries)
-                if: ${{ inputs.conan_export_binaries }}
-                run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update
-
-            -   name: Export the Package
-                if: ${{ !inputs.conan_export_binaries }}
-                run: conan export . ${{ inputs.recipe_id_full }}
-
-            -   name: Create the latest alias
-                if: always()
-                run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
-
-            -   name: Upload the Package(s)
-                if: ${{ always() && inputs.conan_upload_community }}
-                run: |
-                    conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
-                    conan upload ${{ inputs.recipe_id_latest }} -r cura -c
-
-            -   name: Upload the Package(s) to the private Artifactory
-                if: ${{ always() && ! inputs.conan_upload_community }}
-                run: |
-                    conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
-                    conan upload ${{ inputs.recipe_id_latest }} -r cura-private -c
+  package-export:
+    runs-on: ${{ inputs.runs_on }}
+
+    steps:
+      - name: Checkout project
+        uses: actions/checkout@v3
+
+      - name: Setup Python and pip
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ inputs.python_version }}
+          cache: 'pip'
+          cache-dependency-path: .github/workflows/requirements-conan-package.txt
+
+      - name: Install Python requirements and Create default Conan profile
+        run: |
+          pip install -r https://raw.githubusercontent.com/Ultimaker/Cura/main/.github/workflows/requirements-conan-package.txt
+          conan profile new default --detect
+        #  Note the runner requirements are always installed from the main branch in the Ultimaker/Cura repo
+
+      - name: Cache Conan local repository packages
+        uses: actions/cache@v3
+        with:
+          path: $HOME/.conan/data
+          key: ${{ runner.os }}-conan-export-cache
+
+      - name: Get Conan configuration from branch
+        if: ${{ inputs.conan_config_branch != '' }}
+        run: conan config install https://github.com/Ultimaker/conan-config.git -a "-b ${{ inputs.conan_config_branch }}"
+
+      - name: Get Conan configuration
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
+
+      - name: Add Cura private Artifactory remote
+        run: conan remote add cura-private https://ultimaker.jfrog.io/artifactory/api/conan/cura-private True
+
+      - name: Export the Package (binaries)
+        if: ${{ inputs.conan_export_binaries }}
+        run: conan create . ${{ inputs.recipe_id_full }} --build=missing --update -c tools.build:skip_test=True
+
+      - name: Export the Package
+        if: ${{ !inputs.conan_export_binaries }}
+        run: conan export . ${{ inputs.recipe_id_full }}
+
+      - name: Create the latest alias
+        if: always()
+        run: conan alias ${{ inputs.recipe_id_latest }} ${{ inputs.recipe_id_full }}
+
+      - name: Upload the Package(s)
+        if: ${{ always() && inputs.conan_upload_community }}
+        run: |
+          conan upload ${{ inputs.recipe_id_full }} -r cura --all -c
+          conan upload ${{ inputs.recipe_id_latest }} -r cura -c
+
+      - name: Upload the Package(s) to the private Artifactory
+        if: ${{ always() && ! inputs.conan_upload_community }}
+        run: |
+          conan upload ${{ inputs.recipe_id_full }} -r cura-private --all -c
+          conan upload ${{ inputs.recipe_id_latest }} -r cura-private -c

+ 231 - 238
.github/workflows/linux.yml

@@ -2,270 +2,263 @@ name: Linux Installer
 run-name: ${{ inputs.cura_conan_version }} for Linux-${{ inputs.architecture }} by @${{ github.actor }}
 
 on:
-    workflow_dispatch:
-        inputs:
-            cura_conan_version:
-                description: 'Cura Conan Version'
-                default: 'cura/latest@ultimaker/testing'
-                required: true
-                type: string
-            conan_args:
-                description: 'Conan args: eq.: --require-override'
-                default: ''
-                required: false
-                type: string
-            enterprise:
-                description: 'Build Cura as an Enterprise edition'
-                default: false
-                required: true
-                type: boolean
-            staging:
-                description: 'Use staging API'
-                default: false
-                required: true
-                type: boolean
-            architecture:
-                description: 'Architecture'
-                required: true
-                default: 'X64'
-                type: choice
-                options:
-                    - X64
-            operating_system:
-                description: 'OS'
-                required: true
-                default: 'ubuntu-22.04'
-                type: choice
-                options:
-                    - ubuntu-22.04
-                    - ubuntu-20.04
-    workflow_call:
-        inputs:
-            cura_conan_version:
-                description: 'Cura Conan Version'
-                default: 'cura/latest@ultimaker/testing'
-                required: true
-                type: string
-            conan_args:
-                description: 'Conan args: eq.: --require-override'
-                default: ''
-                required: false
-                type: string
-            enterprise:
-                description: 'Build Cura as an Enterprise edition'
-                default: false
-                required: true
-                type: boolean
-            staging:
-                description: 'Use staging API'
-                default: false
-                required: true
-                type: boolean
-            architecture:
-                description: 'Architecture'
-                required: true
-                default: 'X64'
-                type: string
-            operating_system:
-                description: 'OS'
-                required: true
-                default: 'ubuntu-22.04'
-                type: string
+  workflow_dispatch:
+    inputs:
+      cura_conan_version:
+        description: 'Cura Conan Version'
+        default: 'cura/latest@ultimaker/testing'
+        required: true
+        type: string
+      conan_args:
+        description: 'Conan args, e.g. --require-override'
+        default: ''
+        required: false
+        type: string
+      enterprise:
+        description: 'Build Cura as an Enterprise edition'
+        default: false
+        required: true
+        type: boolean
+      staging:
+        description: 'Use staging API'
+        default: false
+        required: true
+        type: boolean
+      architecture:
+        description: 'Architecture'
+        required: true
+        default: 'X64'
+        type: choice
+        options:
+          - X64
+      operating_system:
+        description: 'OS'
+        required: true
+        default: 'ubuntu-22.04'
+        type: choice
+        options:
+          - ubuntu-22.04
+          - ubuntu-20.04
+  workflow_call:
+    inputs:
+      cura_conan_version:
+        description: 'Cura Conan Version'
+        default: 'cura/latest@ultimaker/testing'
+        required: true
+        type: string
+      conan_args:
+        description: 'Conan args, e.g. --require-override'
+        default: ''
+        required: false
+        type: string
+      enterprise:
+        description: 'Build Cura as an Enterprise edition'
+        default: false
+        required: true
+        type: boolean
+      staging:
+        description: 'Use staging API'
+        default: false
+        required: true
+        type: boolean
+      architecture:
+        description: 'Architecture'
+        required: true
+        default: 'X64'
+        type: string
+      operating_system:
+        description: 'OS'
+        required: true
+        default: 'ubuntu-22.04'
+        type: string
 
 env:
-    CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
-    GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
-    CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
-    ENTERPRISE: ${{ inputs.enterprise }}
-    STAGING: ${{ inputs.staging }}
+  CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
+  GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
+  CURA_CONAN_VERSION: ${{ inputs.cura_conan_version }}
+  ENTERPRISE: ${{ inputs.enterprise }}
+  STAGING: ${{ inputs.staging }}
 
 jobs:
-    cura-installer-create:
-        runs-on: ${{ inputs.operating_system }}
+  cura-installer-create:
+    runs-on: ${{ inputs.operating_system }}
 
-        steps:
-            -   name: Checkout
-                uses: actions/checkout@v3
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
 
-            -   name: Setup Python and pip
-                uses: actions/setup-python@v4
-                with:
-                    python-version: '3.10.x'
-                    cache: 'pip'
-                    cache-dependency-path: .github/workflows/requirements-conan-package.txt
-
-            -   name: Install Python requirements for runner
-                run: pip install -r .github/workflows/requirements-conan-package.txt
+      - name: Setup Python and pip
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10.x'
+          cache: 'pip'
+          cache-dependency-path: .github/workflows/requirements-conan-package.txt
 
-            -   name: Cache Conan local repository packages (Bash)
-                uses: actions/cache@v3
-                with:
-                    path: |
-                        $HOME/.conan/data
-                        $HOME/.conan/conan_download_cache
-                    key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
+      - name: Install Python requirements for runner
+        run: pip install -r .github/workflows/requirements-conan-package.txt
 
-            -   name: Hack needed specifically for ubuntu-22.04 from mid-Feb 2023 onwards
-                if: ${{ startsWith(inputs.operating_system, 'ubuntu-22.04') }}
-                run: sudo apt remove libodbc2 libodbcinst2 unixodbc-common -y
+      - name: Cache Conan local repository packages (Bash)
+        uses: actions/cache@v3
+        with:
+          path: |
+            $HOME/.conan/data
+            $HOME/.conan/conan_download_cache
+          key: conan-${{ runner.os }}-${{ runner.arch }}-installer-cache
 
-            # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
-            #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
-            -   name: Install Linux system requirements
-                run: |
-                    sudo rm /var/cache/debconf/config.dat
-                    sudo dpkg --configure -a
-                    sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
-                    sudo apt update
-                    sudo apt upgrade
-                    sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
-                    wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
-                    chmod +x $GITHUB_WORKSPACE/appimagetool
-                    echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
+      - name: Hack needed specifically for ubuntu-22.04 from mid-Feb 2023 onwards
+        if: ${{ startsWith(inputs.operating_system, 'ubuntu-22.04') }}
+        run: sudo apt remove libodbc2 libodbcinst2 unixodbc-common -y
 
-            -   name: Install GCC-12 on ubuntu-22.04
-                if: ${{ startsWith(inputs.operating_system, 'ubuntu-22.04') }}
-                run: |
-                    sudo apt install g++-12 gcc-12 -y
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
+      # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
+      #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
+      - name: Install Linux system requirements
+        run: |
+          sudo rm /var/cache/debconf/config.dat
+          sudo dpkg --configure -a
+          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
+          sudo apt update
+          sudo apt upgrade
+          sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
+          wget --no-check-certificate --quiet https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O $GITHUB_WORKSPACE/appimagetool
+          chmod +x $GITHUB_WORKSPACE/appimagetool
+          echo "APPIMAGETOOL_LOCATION=$GITHUB_WORKSPACE/appimagetool" >> $GITHUB_ENV
 
-            -   name: Use GCC-10 on ubuntu-20.04
-                if: ${{ startsWith(inputs.operating_system, 'ubuntu-20.04') }}
-                run: |
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 10
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-10 10
+      - name: Install GCC-13
+        run: |
+          sudo apt install g++-13 gcc-13 -y
+          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
+          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
 
-            -   name: Create the default Conan profile
-                run: conan profile new default --detect --force
+      - name: Create the default Conan profile
+        run: conan profile new default --detect --force
 
-            -   name: Configure GPG Key Linux (Bash)
-                run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
+      - name: Configure GPG Key Linux (Bash)
+        run: echo -n "$GPG_PRIVATE_KEY" | base64 --decode | gpg --import
 
-            -   name: Get Conan configuration
-                run: |
-                    conan config install https://github.com/Ultimaker/conan-config.git
-                    conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
+      - name: Get Conan configuration
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
 
-            -   name: Use Conan download cache (Bash)
-                run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
+      - name: Use Conan download cache (Bash)
+        run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
 
-            -   name: Create the Packages (Bash)
-                run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
+      - name: Create the Packages (Bash)
+        run: conan install $CURA_CONAN_VERSION ${{ inputs.conan_args }} --build=missing --update -if cura_inst -g VirtualPythonEnv -o cura:enterprise=$ENTERPRISE -o cura:staging=$STAGING --json "cura_inst/conan_install_info.json"
 
-            -   name: Upload the Package(s)
-                if: always()
-                run: |
-                    conan upload "*" -r cura --all -c
+      - name: Upload the Package(s)
+        if: always()
+        run: |
+          conan upload "*" -r cura --all -c
 
-            -   name: Set Environment variables for Cura (bash)
-                run: |
-                    . ./cura_inst/bin/activate_github_actions_env.sh
-                    . ./cura_inst/bin/activate_github_actions_version_env.sh
+      - name: Set Environment variables for Cura (bash)
+        run: |
+          . ./cura_inst/bin/activate_github_actions_env.sh
+          . ./cura_inst/bin/activate_github_actions_version_env.sh
 
-                # FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
-                #  OpenSSL statically for CPython, but our Python Dependenies (such as PyQt6) require a shared library.
-                #  Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
-                #  and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
+        # FIXME: This is a workaround to ensure that we use and pack a shared library for OpenSSL 1.1.1l. We currently compile
+        #  OpenSSL statically for CPython, but our Python dependencies (such as PyQt6) require a shared library.
+        #  Because Conan won't allow for building the same library with two different options (easily) we need to install it explicitly
+        #  and do a manual copy to the VirtualEnv, such that Pyinstaller can find it.
 
-            -   name: Install OpenSSL shared
-                run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
+      - name: Install OpenSSL shared
+        run: conan install openssl/1.1.1l@_/_ --build=missing --update -o openssl:shared=True -g deploy
 
-            -   name: Copy OpenSSL shared (Bash)
-                run: |
-                    cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
-                    cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true                    
+      - name: Copy OpenSSL shared (Bash)
+        run: |
+          cp ./openssl/lib/*.so* ./cura_inst/bin/ || true
+          cp ./openssl/lib/*.dylib* ./cura_inst/bin/ || true                    
 
-            -   name: Create the Cura dist
-                run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
+      - name: Create the Cura dist
+        run: pyinstaller ./cura_inst/UltiMaker-Cura.spec
 
-            -   name: Output the name file name and extension
-                id: filename
-                shell: python
-                run: |
-                    import os
-                    enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
-                    if "${{ inputs.operating_system }}" == "ubuntu-22.04":
-                        installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-modern-${{ inputs.architecture }}"
-                    else:
-                        installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
-                    output_env = os.environ["GITHUB_OUTPUT"]
-                    content = ""
-                    if os.path.exists(output_env):
-                        with open(output_env, "r") as f:
-                            content = f.read()
-                    with open(output_env, "w") as f:
-                        f.write(content)
-                        f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
+      - name: Output the file name and extension
+        id: filename
+        shell: python
+        run: |
+          import os
+          enterprise = "-Enterprise" if "${{ inputs.enterprise }}" == "true" else ""
+          if "${{ inputs.operating_system }}" == "ubuntu-22.04":
+              installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-modern-${{ inputs.architecture }}"
+          else:
+              installer_filename = f"UltiMaker-Cura-{os.getenv('CURA_VERSION_FULL')}{enterprise}-linux-${{ inputs.architecture }}"
+          output_env = os.environ["GITHUB_OUTPUT"]
+          content = ""
+          if os.path.exists(output_env):
+              with open(output_env, "r") as f:
+                  content = f.read()
+          with open(output_env, "w") as f:
+              f.write(content)
+              f.writelines(f"INSTALLER_FILENAME={installer_filename}\n")
 
-            -   name: Summarize the used Conan dependencies
-                shell: python
-                run: |
-                    import os
-                    import json
-                    from pathlib import Path
-                    
-                    conan_install_info_path = Path("cura_inst/conan_install_info.json")
-                    conan_info = {"installed": []}
-                    if os.path.exists(conan_install_info_path):
-                        with open(conan_install_info_path, "r") as f:
-                            conan_info = json.load(f)
-                    sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
-                    
-                    summary_env = os.environ["GITHUB_STEP_SUMMARY"]
-                    content = ""
-                    if os.path.exists(summary_env):
-                        with open(summary_env, "r") as f:
-                            content = f.read()
-                    
-                    with open(summary_env, "w") as f:
-                        f.write(content)
-                        f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
-                        f.writelines("## Conan packages:\n")
-                        for dep in sorted_deps:
-                            f.writelines(f"`{dep}`\n")
+      - name: Summarize the used Conan dependencies
+        shell: python
+        run: |
+          import os
+          import json
+          from pathlib import Path
+          
+          conan_install_info_path = Path("cura_inst/conan_install_info.json")
+          conan_info = {"installed": []}
+          if os.path.exists(conan_install_info_path):
+              with open(conan_install_info_path, "r") as f:
+                  conan_info = json.load(f)
+          sorted_deps = sorted([dep["recipe"]["id"].replace('#', r' rev: ') for dep in conan_info["installed"]])
+          
+          summary_env = os.environ["GITHUB_STEP_SUMMARY"]
+          content = ""
+          if os.path.exists(summary_env):
+              with open(summary_env, "r") as f:
+                  content = f.read()
+          
+          with open(summary_env, "w") as f:
+              f.write(content)
+              f.writelines("# ${{ steps.filename.outputs.INSTALLER_FILENAME }}\n")
+              f.writelines("## Conan packages:\n")
+              for dep in sorted_deps:
+                  f.writelines(f"`{dep}`\n")
 
-            -   name: Summarize the used Python modules
-                shell: python
-                run: |
-                    import os
-                    import pkg_resources
-                    summary_env = os.environ["GITHUB_STEP_SUMMARY"]
-                    content = ""
-                    if os.path.exists(summary_env):
-                        with open(summary_env, "r") as f:
-                            content = f.read()
-                    
-                    with open(summary_env, "w") as f:
-                        f.write(content)
-                        f.writelines("## Python modules:\n")
-                        for package in pkg_resources.working_set:
-                            f.writelines(f"`{package.key}/{package.version}`\n")
+      - name: Summarize the used Python modules
+        shell: python
+        run: |
+          import os
+          import pkg_resources
+          summary_env = os.environ["GITHUB_STEP_SUMMARY"]
+          content = ""
+          if os.path.exists(summary_env):
+              with open(summary_env, "r") as f:
+                  content = f.read()
+          
+          with open(summary_env, "w") as f:
+              f.write(content)
+              f.writelines("## Python modules:\n")
+              for package in pkg_resources.working_set:
+                  f.writelines(f"`{package.key}/{package.version}`\n")
 
-            -   name: Create the Linux AppImage (Bash)
-                run: |
-                    python ../cura_inst/packaging/AppImage/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
-                    chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
-                working-directory: dist
+      - name: Create the Linux AppImage (Bash)
+        run: |
+          python ../cura_inst/packaging/AppImage/create_appimage.py ./UltiMaker-Cura $CURA_VERSION_FULL "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
+          chmod +x "${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage"
+        working-directory: dist
 
-            -   name: Upload the AppImage
-                uses: actions/upload-artifact@v3
-                with:
-                    name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-AppImage
-                    path: |
-                        dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
-                    retention-days: 5
+      - name: Upload the AppImage
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ steps.filename.outputs.INSTALLER_FILENAME }}-AppImage
+          path: |
+            dist/${{ steps.filename.outputs.INSTALLER_FILENAME }}.AppImage
+          retention-days: 5
 
-    notify-export:
-        if: ${{ always() }}
-        needs: [ cura-installer-create ]
+  notify-export:
+    if: ${{ always() }}
+    needs: [ cura-installer-create ]
 
-        uses: ultimaker/cura/.github/workflows/notify.yml@main
-        with:
-            success: ${{ contains(join(needs.*.result, ','), 'success') }}
-            success_title: "Create the Cura distributions"
-            success_body: "Installers for ${{ inputs.cura_conan_version }}"
-            failure_title: "Failed to create the Cura distributions"
-            failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
-        secrets: inherit
+    uses: ultimaker/cura/.github/workflows/notify.yml@main
+    with:
+      success: ${{ contains(join(needs.*.result, ','), 'success') }}
+      success_title: "Create the Cura distributions"
+      success_body: "Installers for ${{ inputs.cura_conan_version }}"
+      failure_title: "Failed to create the Cura distributions"
+      failure_body: "Failed to create at least 1 installer for ${{ inputs.cura_conan_version }}"
+    secrets: inherit
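
Because linux.yml now declares the same inputs under workflow_call as under workflow_dispatch, a parent workflow can build the installer directly. A sketch of such a call; only the input names, their defaults and the workflow path come from the file above, the calling job itself is hypothetical:

jobs:
  linux-installer:
    uses: ultimaker/cura/.github/workflows/linux.yml@main
    with:
      cura_conan_version: cura/latest@ultimaker/testing
      enterprise: false
      staging: false
      architecture: X64
      operating_system: ubuntu-22.04
    secrets: inherit

The optional conan_args input is omitted here and falls back to its empty-string default.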

+ 1 - 1
.github/workflows/requirements-conan-package.txt

@@ -1,2 +1,2 @@
-conan==1.60.2
+conan>=1.60.2,<2.0.0
 sip
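
The pin is loosened from exactly 1.60.2 to any Conan 1.x release at or above 1.60.2 (and below 2.0.0). A small sanity-check step a workflow could add to confirm what pip actually resolved; the step is a sketch and not part of this commit:

      - name: Check resolved Conan version (sketch)
        run: |
          # Should report a 1.x release >= 1.60.2 and never 2.x, matching the
          # conan>=1.60.2,<2.0.0 range in requirements-conan-package.txt.
          conan --version
          python -c "import conans; print(conans.__version__)"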

+ 154 - 152
.github/workflows/unit-test.yml

@@ -2,163 +2,165 @@
 name: unit-test
 
 on:
-    push:
-        paths:
-            - 'plugins/**'
-            - 'resources/**'
-            - 'cura/**'
-            - 'icons/**'
-            - 'tests/**'
-            - 'packaging/**'
-            - '.github/workflows/conan-*.yml'
-            - '.github/workflows/unit-test.yml'
-            - '.github/workflows/notify.yml'
-            - '.github/workflows/requirements-conan-package.txt'
-            - 'requirements*.txt'
-            - 'conanfile.py'
-            - 'conandata.yml'
-            - 'GitVersion.yml'
-            - '*.jinja'
-        branches:
-            - main
-            - 'CURA-*'
-            - '[1-9]+.[0-9]+'
-        tags:
-            - '[0-9]+.[0-9]+.[0-9]+'
-            - '[0-9]+.[0-9]+-beta'
-    pull_request:
-        paths:
-            - 'plugins/**'
-            - 'resources/**'
-            - 'cura/**'
-            - 'icons/**'
-            - 'tests/**'
-            - 'packaging/**'
-            - '.github/workflows/conan-*.yml'
-            - '.github/workflows/unit-test.yml'
-            - '.github/workflows/notify.yml'
-            - '.github/workflows/requirements-conan-package.txt'
-            - 'requirements*.txt'
-            - 'conanfile.py'
-            - 'conandata.yml'
-            - 'GitVersion.yml'
-            - '*.jinja'
-        branches:
-            - main
-            - '[1-9]+.[0-9]+'
-        tags:
-            - '[0-9]+.[0-9]+.[0-9]+'
-            - '[0-9]+.[0-9]+-beta'
+  push:
+    paths:
+      - 'plugins/**'
+      - 'resources/**'
+      - 'cura/**'
+      - 'icons/**'
+      - 'tests/**'
+      - 'packaging/**'
+      - '.github/workflows/conan-*.yml'
+      - '.github/workflows/unit-test.yml'
+      - '.github/workflows/notify.yml'
+      - '.github/workflows/requirements-conan-package.txt'
+      - 'requirements*.txt'
+      - 'conanfile.py'
+      - 'conandata.yml'
+      - 'GitVersion.yml'
+      - '*.jinja'
+    branches:
+      - main
+      - 'CURA-*'
+      - '[1-9]+.[0-9]+'
+    tags:
+      - '[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+-beta'
+  pull_request:
+    paths:
+      - 'plugins/**'
+      - 'resources/**'
+      - 'cura/**'
+      - 'icons/**'
+      - 'tests/**'
+      - 'packaging/**'
+      - '.github/workflows/conan-*.yml'
+      - '.github/workflows/unit-test.yml'
+      - '.github/workflows/notify.yml'
+      - '.github/workflows/requirements-conan-package.txt'
+      - 'requirements*.txt'
+      - 'conanfile.py'
+      - 'conandata.yml'
+      - 'GitVersion.yml'
+      - '*.jinja'
+    branches:
+      - main
+      - '[1-9]+.[0-9]+'
+    tags:
+      - '[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+-beta'
 
 env:
-    CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
-    CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
-    CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
-    CONAN_LOG_RUN_TO_OUTPUT: 1
-    CONAN_LOGGING_LEVEL: info
-    CONAN_NON_INTERACTIVE: 1
+  CONAN_LOGIN_USERNAME_CURA: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD_CURA: ${{ secrets.CONAN_PASS }}
+  CONAN_LOGIN_USERNAME_CURA_CE: ${{ secrets.CONAN_USER }}
+  CONAN_PASSWORD_CURA_CE: ${{ secrets.CONAN_PASS }}
+  CONAN_LOG_RUN_TO_OUTPUT: 1
+  CONAN_LOGGING_LEVEL: info
+  CONAN_NON_INTERACTIVE: 1
 
 permissions:
   contents: read
 
 jobs:
-    conan-recipe-version:
-        uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
+  conan-recipe-version:
+    uses: ultimaker/cura/.github/workflows/conan-recipe-version.yml@main
+    with:
+      project_name: cura
+
+  testing:
+    runs-on: ubuntu-22.04
+    needs: [ conan-recipe-version ]
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
         with:
-            project_name: cura
-
-    testing:
-        runs-on: ubuntu-22.04
-        needs: [ conan-recipe-version ]
-
-        steps:
-            -   name: Checkout
-                uses: actions/checkout@v3
-                with:
-                    fetch-depth: 2
-
-            -   name: Setup Python and pip
-                uses: actions/setup-python@v4
-                with:
-                    python-version: '3.11.x'
-                    architecture: 'x64'
-                    cache: 'pip'
-                    cache-dependency-path: .github/workflows/requirements-conan-package.txt
-
-            -   name: Install Python requirements and Create default Conan profile
-                run: pip install -r requirements-conan-package.txt
-                working-directory: .github/workflows/
-
-            -   name: Use Conan download cache (Bash)
-                if: ${{ runner.os != 'Windows' }}
-                run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
-
-            -   name: Cache Conan local repository packages (Bash)
-                uses: actions/cache@v3
-                if: ${{ runner.os != 'Windows' }}
-                with:
-                    path: |
-                        $HOME/.conan/data
-                        $HOME/.conan/conan_download_cache
-                    key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
-
-            # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
-            #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
-            -   name: Install Linux system requirements
-                if: ${{ runner.os == 'Linux' }}
-                run: |
-                    sudo rm /var/cache/debconf/config.dat
-                    sudo dpkg --configure -a
-                    sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
-                    sudo apt update
-                    sudo apt upgrade
-                    sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
-
-            -   name: Install GCC-12 on ubuntu-22.04
-                run: |
-                    sudo apt install g++-12 gcc-12 -y
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
-                    
-            -   name: Get Conan configuration
-                run: conan config install https://github.com/Ultimaker/conan-config.git
-
-            -   name: Get Conan profile
-                run: conan profile new default --detect --force
-
-            -   name: Install dependencies
-                run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True  -g VirtualPythonEnv -if venv
-
-            -   name: Upload the Dependency package(s)
-                run: conan upload "*" -r cura --all -c
-
-            -   name: Set Environment variables for Cura (bash)
-                if: ${{ runner.os != 'Windows' }}
-                run: |
-                    . ./venv/bin/activate_github_actions_env.sh
-
-            -   name: Run Unit Test
-                id: run-test
-                run: |
-                    pytest --junitxml=junit_cura.xml
-                working-directory: tests
-
-            -   name: Save PR metadata
-                if: always()
-                run: |
-                    echo ${{ github.event.number }} > pr-id.txt
-                    echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
-                    echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
-                working-directory: tests
-
-            -   name: Upload Test Results
-                if: always()
-                uses: actions/upload-artifact@v3
-                with:
-                    name: test-result
-                    path: |
-                        tests/**/*.xml
-                        tests/pr-id.txt
-                        tests/pr-head-repo.txt
-                        tests/pr-head-ref.txt
+          fetch-depth: 2
+
+      - name: Setup Python and pip
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11.x'
+          architecture: 'x64'
+          cache: 'pip'
+          cache-dependency-path: .github/workflows/requirements-conan-package.txt
+
+      - name: Install Python requirements and Create default Conan profile
+        run: pip install -r requirements-conan-package.txt
+        working-directory: .github/workflows/
+
+      - name: Use Conan download cache (Bash)
+        if: ${{ runner.os != 'Windows' }}
+        run: conan config set storage.download_cache="$HOME/.conan/conan_download_cache"
+
+      - name: Cache Conan local repository packages (Bash)
+        uses: actions/cache@v3
+        if: ${{ runner.os != 'Windows' }}
+        with:
+          path: |
+            $HOME/.conan/data
+            $HOME/.conan/conan_download_cache
+          key: conan-${{ runner.os }}-${{ runner.arch }}-unit-cache
+
+      # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
+      #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
+      - name: Install Linux system requirements
+        if: ${{ runner.os == 'Linux' }}
+        run: |
+          sudo rm /var/cache/debconf/config.dat
+          sudo dpkg --configure -a
+          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
+          sudo apt update
+          sudo apt upgrade
+          sudo apt install build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config -y
+
+      - name: Install GCC-13
+        run: |
+          sudo apt install g++-13 gcc-13 -y
+          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
+          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
+
+      - name: Get Conan configuration
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
+
+      - name: Get Conan profile
+        run: conan profile new default --detect --force
+
+      - name: Install dependencies
+        run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} --build=missing --update -o cura:devtools=True  -g VirtualPythonEnv -if venv
+
+      - name: Upload the Dependency package(s)
+        run: conan upload "*" -r cura --all -c
+
+      - name: Set Environment variables for Cura (bash)
+        if: ${{ runner.os != 'Windows' }}
+        run: |
+          . ./venv/bin/activate_github_actions_env.sh
+
+      - name: Run Unit Test
+        id: run-test
+        run: |
+          pytest --junitxml=junit_cura.xml
+        working-directory: tests
+
+      - name: Save PR metadata
+        if: always()
+        run: |
+          echo ${{ github.event.number }} > pr-id.txt
+          echo ${{ github.event.pull_request.head.repo.full_name }} > pr-head-repo.txt
+          echo ${{ github.event.pull_request.head.ref }} > pr-head-ref.txt
+        working-directory: tests
+
+      - name: Upload Test Results
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: test-result
+          path: |
+            tests/**/*.xml
+            tests/pr-id.txt
+            tests/pr-head-repo.txt
+            tests/pr-head-ref.txt

+ 73 - 68
.github/workflows/update-translation.yml

@@ -1,82 +1,87 @@
 name: update-translations
 
 on:
-    push:
-        paths:
-            - 'plugins/**'
-            - 'resources/**'
-            - 'cura/**'
-            - 'icons/**'
-            - 'tests/**'
-            - 'packaging/**'
-            - '.github/workflows/conan-*.yml'
-            - '.github/workflows/notify.yml'
-            - '.github/workflows/requirements-conan-package.txt'
-            - 'requirements*.txt'
-            - 'conanfile.py'
-            - 'conandata.yml'
-            - 'GitVersion.yml'
-            - '*.jinja'
-        branches:
-          - '[1-9].[0-9]'
-          - '[1-9].[0-9][0-9]'
-        tags:
-          - '[1-9].[0-9].[0-9]*'
-          - '[1-9].[0-9].[0-9]'
-          - '[1-9].[0-9][0-9].[0-9]*'
+  push:
+    paths:
+      - 'plugins/**'
+      - 'resources/**'
+      - 'cura/**'
+      - 'icons/**'
+      - 'tests/**'
+      - 'packaging/**'
+      - '.github/workflows/conan-*.yml'
+      - '.github/workflows/notify.yml'
+      - '.github/workflows/requirements-conan-package.txt'
+      - 'requirements*.txt'
+      - 'conanfile.py'
+      - 'conandata.yml'
+      - 'GitVersion.yml'
+      - '*.jinja'
+    branches:
+      - '[1-9].[0-9]'
+      - '[1-9].[0-9][0-9]'
+    tags:
+      - '[1-9].[0-9].[0-9]*'
+      - '[1-9].[0-9].[0-9]'
+      - '[1-9].[0-9][0-9].[0-9]*'
 
 jobs:
-    update-translations:
-        name: Update translations
+  update-translations:
+    name: Update translations
 
-        runs-on: ubuntu-latest
-        steps:
-            -   name: Checkout
-                uses: actions/checkout@v3
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
 
-            -   name: Cache Conan data
-                id: cache-conan
-                uses: actions/cache@v3
-                with:
-                    path: ~/.conan
-                    key: ${{ runner.os }}-conan
+      - name: Cache Conan data
+        id: cache-conan
+        uses: actions/cache@v3
+        with:
+          path: ~/.conan
+          key: ${{ runner.os }}-conan
 
-            -   name: Setup Python and pip
-                uses: actions/setup-python@v4
-                with:
-                    python-version: 3.10.x
-                    cache: pip
-                    cache-dependency-path: .github/workflows/requirements-conan-package.txt
+      - name: Setup Python and pip
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11.x
+          cache: pip
+          cache-dependency-path: .github/workflows/requirements-conan-package.txt
 
-            -   name: Install Python requirements for runner
-                run: pip install -r .github/workflows/requirements-conan-package.txt
+      - name: Install Python requirements for runner
+        run: pip install -r .github/workflows/requirements-conan-package.txt
 
-            # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
-            #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
-            -   name: Install Linux system requirements
-                if: ${{ runner.os == 'Linux' }}
-                run: |
-                    sudo rm /var/cache/debconf/config.dat
-                    sudo dpkg --configure -a
-                    sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
-                    sudo apt update
-                    sudo apt upgrade
-                    sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
-                    sudo apt install g++-12 gcc-12 -y
-                    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
-                    sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-12 12
+      # NOTE: Due to what are probably github issues, we have to remove the cache and reconfigure before the rest.
+      #       This is maybe because grub caches the disk it uses last time, which is recreated each time.
+      - name: Install Linux system requirements
+        if: ${{ runner.os == 'Linux' }}
+        run: |
+          sudo rm /var/cache/debconf/config.dat
+          sudo dpkg --configure -a
+          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
+          sudo apt update
+          sudo apt upgrade
+          sudo apt install efibootmgr build-essential checkinstall libegl-dev zlib1g-dev libssl-dev ninja-build autoconf libx11-dev libx11-xcb-dev libfontenc-dev libice-dev libsm-dev libxau-dev libxaw7-dev libxcomposite-dev libxcursor-dev libxdamage-dev libxdmcp-dev libxext-dev libxfixes-dev libxi-dev libxinerama-dev libxkbfile-dev libxmu-dev libxmuu-dev libxpm-dev libxrandr-dev libxrender-dev libxres-dev libxss-dev libxt-dev libxtst-dev libxv-dev libxvmc-dev libxxf86vm-dev xtrans-dev libxcb-render0-dev libxcb-render-util0-dev libxcb-xkb-dev libxcb-icccm4-dev libxcb-image0-dev libxcb-keysyms1-dev libxcb-randr0-dev libxcb-shape0-dev libxcb-sync-dev libxcb-xfixes0-dev libxcb-xinerama0-dev xkb-data libxcb-dri3-dev uuid-dev libxcb-util-dev libxkbcommon-x11-dev pkg-config flex bison g++-12 gcc-12 -y
 
-            -   name: Create the default Conan profile
-                run: conan profile new default --detect --force
+      - name: Install GCC-13
+        run: |
+          sudo apt install g++-13 gcc-13 -y
+          sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 13
+          sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 13
 
-            -   name: Get Conan configuration
-                run: conan config install https://github.com/Ultimaker/conan-config.git
+      - name: Create the default Conan profile
+        run: conan profile new default --detect --force
 
-            -   name: generate the files using Conan install
-                run: conan install . --build=missing --update -o cura:devtools=True
+      - name: Get Conan configuration
+        run: |
+          conan config install https://github.com/Ultimaker/conan-config.git
+          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
 
-            -   uses: stefanzweifel/git-auto-commit-action@v4
-                with:
-                    file_pattern: resources/i18n/*.po resources/i18n/*.pot
-                    status_options: --untracked-files=no
-                    commit_message: update translations
+      - name: generate the files using Conan install
+        run: conan install . --build=missing --update -o cura:devtools=True
+
+      - uses: stefanzweifel/git-auto-commit-action@v4
+        with:
+          file_pattern: resources/i18n/*.po resources/i18n/*.pot
+          status_options: --untracked-files=no
+          commit_message: update translations

+ 15 - 4
cura/Arranging/ArrangeObjectsJob.py

@@ -8,17 +8,20 @@ from UM.Logger import Logger
 from UM.Message import Message
 from UM.Scene.SceneNode import SceneNode
 from UM.i18n import i18nCatalog
-from cura.Arranging.Nest2DArrange import arrange
+from cura.Arranging.GridArrange import GridArrange
+from cura.Arranging.Nest2DArrange import Nest2DArrange
 
 i18n_catalog = i18nCatalog("cura")
 
 
 class ArrangeObjectsJob(Job):
-    def __init__(self, nodes: List[SceneNode], fixed_nodes: List[SceneNode], min_offset = 8) -> None:
+    def __init__(self, nodes: List[SceneNode], fixed_nodes: List[SceneNode], min_offset = 8,
+                *, grid_arrange: bool = False) -> None:
         super().__init__()
         self._nodes = nodes
         self._fixed_nodes = fixed_nodes
         self._min_offset = min_offset
+        self._grid_arrange = grid_arrange
 
     def run(self):
         found_solution_for_all = False
@@ -29,10 +32,18 @@ class ArrangeObjectsJob(Job):
                                  title = i18n_catalog.i18nc("@info:title", "Finding Location"))
         status_message.show()
 
+        if self._grid_arrange:
+            arranger = GridArrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes)
+        else:
+            arranger = Nest2DArrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes,
+                                     factor=1000)
+
+        found_solution_for_all = False
         try:
-            found_solution_for_all = arrange(self._nodes, Application.getInstance().getBuildVolume(), self._fixed_nodes)
+            found_solution_for_all = arranger.arrange()
         except:  # If the thread crashes, the message should still close
-            Logger.logException("e", "Unable to arrange the objects on the buildplate. The arrange algorithm has crashed.")
+            Logger.logException("e",
+                                "Unable to arrange the objects on the buildplate. The arrange algorithm has crashed.")
 
         status_message.hide()
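For reference, a minimal sketch of how a caller could request the new grid placement through the keyword added above; the actual call sites are outside this excerpt, and the handler name here is hypothetical:

    from cura.Arranging.ArrangeObjectsJob import ArrangeObjectsJob

    # nodes / fixed_nodes are lists of SceneNode; grid_arrange=False keeps the Nest2D packing.
    job = ArrangeObjectsJob(nodes, fixed_nodes, min_offset=8, grid_arrange=True)
    job.finished.connect(_on_arrange_finished)  # hypothetical handler
    job.start()  # runs run() on a worker thread and shows the "Finding Location" message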
 

+ 28 - 0
cura/Arranging/Arranger.py

@@ -0,0 +1,28 @@
+from typing import List, TYPE_CHECKING, Optional, Tuple, Set
+
+if TYPE_CHECKING:
+    from UM.Operations.GroupedOperation import GroupedOperation
+
+
+class Arranger:
+    def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple["GroupedOperation", int]:
+        """
+        Find placement for a set of scene nodes, but don't actually move them just yet.
+        :param add_new_nodes_in_scene: Whether to create new scene nodes before applying the transformations and rotations
+        :return: tuple (grouped_operation, not_fit_count)
+            WHERE
+            grouped_operation: A GroupedOperation that, when pushed, moves the nodes to their new positions
+            not_fit_count: The number of nodes for which no place could be found on the buildplate
+        """
+        raise NotImplementedError
+
+    def arrange(self, *, add_new_nodes_in_scene: bool = False) -> bool:
+        """
+        Find placement for a set of scene nodes, and move them by using a single grouped operation.
+        :param add_new_nodes_in_scene: Whether to create new scene nodes before applying the transformations and rotations
+        :return: found_solution_for_all: Whether the algorithm found a place on the buildplate for all the objects
+        """
+        grouped_operation, not_fit_count = self.createGroupOperationForArrange(
+            add_new_nodes_in_scene=add_new_nodes_in_scene)
+        grouped_operation.push()
+        return not_fit_count == 0
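The class above is a small template method: a concrete arranger only has to build the GroupedOperation and report how many nodes did not fit, and arrange() pushes the operation and folds that count into a boolean. A minimal hypothetical subclass, purely to illustrate the contract (not part of this change):

    from typing import Tuple

    from UM.Operations.GroupedOperation import GroupedOperation
    from cura.Arranging.Arranger import Arranger


    class NoopArrange(Arranger):
        """Illustration only: 'arranges' by leaving every node where it already is."""

        def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]:
            # An empty grouped operation and a not-fit count of 0 make arrange() return True.
            return GroupedOperation(), 0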

+ 331 - 0
cura/Arranging/GridArrange.py

@@ -0,0 +1,331 @@
+import math
+from typing import List, TYPE_CHECKING, Tuple, Set
+
+if TYPE_CHECKING:
+    from UM.Scene.SceneNode import SceneNode
+    from cura.BuildVolume import BuildVolume
+
+from UM.Application import Application
+from UM.Math.Vector import Vector
+from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
+from UM.Operations.GroupedOperation import GroupedOperation
+from UM.Operations.TranslateOperation import TranslateOperation
+from cura.Arranging.Arranger import Arranger
+
+
+class GridArrange(Arranger):
+    def __init__(self, nodes_to_arrange: List["SceneNode"], build_volume: "BuildVolume", fixed_nodes: List["SceneNode"] = None):
+        if fixed_nodes is None:
+            fixed_nodes = []
+        self._nodes_to_arrange = nodes_to_arrange
+        self._build_volume = build_volume
+        self._build_volume_bounding_box = build_volume.getBoundingBox()
+        self._fixed_nodes = fixed_nodes
+
+        self._margin_x: float = 1
+        self._margin_y: float = 1
+
+        self._grid_width = 0
+        self._grid_height = 0
+        for node in self._nodes_to_arrange:
+            bounding_box = node.getBoundingBox()
+            self._grid_width = max(self._grid_width, bounding_box.width)
+            self._grid_height = max(self._grid_height, bounding_box.depth)
+        self._grid_width += self._margin_x
+        self._grid_height += self._margin_y
+
+        # Round up the grid size to the nearest cm
+        grid_precision = 10  # 1cm
+        self._grid_width = math.ceil(self._grid_width / grid_precision) * grid_precision
+        self._grid_height = math.ceil(self._grid_height / grid_precision) * grid_precision
+
+        self._offset_x = 0
+        self._offset_y = 0
+        self._findOptimalGridOffset()
+
+        coord_initial_leftover_x = self._build_volume_bounding_box.right + 2 * self._grid_width
+        coord_initial_leftover_y = (self._build_volume_bounding_box.back + self._build_volume_bounding_box.front) * 0.5
+        self._initial_leftover_grid_x, self._initial_leftover_grid_y = self._coordSpaceToGridSpace(
+            coord_initial_leftover_x, coord_initial_leftover_y)
+        self._initial_leftover_grid_x = math.floor(self._initial_leftover_grid_x)
+        self._initial_leftover_grid_y = math.floor(self._initial_leftover_grid_y)
+
+        # Find grid indexes that intersect with fixed objects
+        self._fixed_nodes_grid_ids = set()
+        for node in self._fixed_nodes:
+            self._fixed_nodes_grid_ids = self._fixed_nodes_grid_ids.union(
+                self._intersectingGridIdxInclusive(node.getBoundingBox()))
+
+        # Grid indexes that are in a disallowed area
+        for polygon in self._build_volume.getDisallowedAreas():
+            self._fixed_nodes_grid_ids = self._fixed_nodes_grid_ids.union(
+                self._getIntersectingGridIdForPolygon(polygon))
+
+        self._build_plate_grid_ids = self._intersectingGridIdxExclusive(self._build_volume_bounding_box)
+
+        # Filter out the corner grid squares if the build plate shape is elliptic
+        if self._build_volume.getShape() == "elliptic":
+            self._build_plate_grid_ids = set(
+                filter(lambda grid_id: self._checkGridUnderDiscSpace(grid_id[0], grid_id[1]),
+                       self._build_plate_grid_ids))
+
+        self._allowed_grid_idx = self._build_plate_grid_ids.difference(self._fixed_nodes_grid_ids)
+
+    def createGroupOperationForArrange(self, *, add_new_nodes_in_scene: bool = False) -> Tuple[GroupedOperation, int]:
+        # Find the sequence in which items are placed
+        coord_build_plate_center_x = self._build_volume_bounding_box.width * 0.5 + self._build_volume_bounding_box.left
+        coord_build_plate_center_y = self._build_volume_bounding_box.depth * 0.5 + self._build_volume_bounding_box.back
+        grid_build_plate_center_x, grid_build_plate_center_y = self._coordSpaceToGridSpace(coord_build_plate_center_x,
+                                                                                           coord_build_plate_center_y)
+
+        sequence: List[Tuple[int, int]] = list(self._allowed_grid_idx)
+        sequence.sort(key=lambda grid_id: (grid_build_plate_center_x - grid_id[0]) ** 2 + (
+                    grid_build_plate_center_y - grid_id[1]) ** 2)
+        scene_root = Application.getInstance().getController().getScene().getRoot()
+        grouped_operation = GroupedOperation()
+
+        for grid_id, node in zip(sequence, self._nodes_to_arrange):
+            if add_new_nodes_in_scene:
+                grouped_operation.addOperation(AddSceneNodeOperation(node, scene_root))
+            grid_x, grid_y = grid_id
+            operation = self._moveNodeOnGrid(node, grid_x, grid_y)
+            grouped_operation.addOperation(operation)
+
+        leftover_nodes = self._nodes_to_arrange[len(sequence):]
+
+        left_over_grid_y = self._initial_leftover_grid_y
+        for node in leftover_nodes:
+            if add_new_nodes_in_scene:
+                grouped_operation.addOperation(AddSceneNodeOperation(node, scene_root))
+            # find the first next grid position that isn't occupied by a fixed node
+            while (self._initial_leftover_grid_x, left_over_grid_y) in self._fixed_nodes_grid_ids:
+                left_over_grid_y = left_over_grid_y - 1
+
+            operation = self._moveNodeOnGrid(node, self._initial_leftover_grid_x, left_over_grid_y)
+            grouped_operation.addOperation(operation)
+            left_over_grid_y = left_over_grid_y - 1
+
+        return grouped_operation, len(leftover_nodes)
+
+    def _findOptimalGridOffset(self):
+        if len(self._fixed_nodes) == 0:
+            self._offset_x = 0
+            self._offset_y = 0
+            return
+
+        if len(self._fixed_nodes) == 1:
+            center_grid_x = 0.5 * self._grid_width + self._build_volume_bounding_box.left
+            center_grid_y = 0.5 * self._grid_height + self._build_volume_bounding_box.back
+
+            bounding_box = self._fixed_nodes[0].getBoundingBox()
+            center_node_x = (bounding_box.left + bounding_box.right) * 0.5
+            center_node_y = (bounding_box.back + bounding_box.front) * 0.5
+
+            self._offset_x = center_node_x - center_grid_x
+            self._offset_y = center_node_y - center_grid_y
+
+            return
+
+        # If there are multiple fixed nodes, an optimal solution is not always possible
+        # We will try to find an offset that minimizes the number of grid intersections
+        # with fixed nodes. The algorithm below achieves this by utilizing a scanline
+        # algorithm. In this algorithm each axis is solved separately as offsetting
+        # is completely independent in each axis. The comments explaining the algorithm
+        # below are for the x-axis, but the same applies for the y-axis.
+        #
+        # Each node either occupies ceil((node.right - node.left) / grid_width) or
+        # ceil((node.right - node.left) / grid_width) + 1 grid squares. We will call
+        # these the node's "footprint".
+        #
+        #                      ┌────────────────┐
+        #   minimum foot-print │      NODE      │
+        #                      └────────────────┘
+        # │    grid 1   │    grid 2    │    grid 3    │    grid 4    |    grid 5    |
+        #                             ┌────────────────┐
+        #          maximum foot-print │      NODE      │
+        #                             └────────────────┘
+        #
+        # The algorithm will find the grid offset such that the number of nodes with
+        # a _minimal_ footprint is _maximized_.
+
+        # The scanline algorithm works as follows: we create events for both end points
+        # of each node's footprint. Each event has two properties:
+        # - the coordinate: the amount the endpoint can move to the
+        #      left before it crosses a grid line
+        # - the change: either +1 or -1, indicating whether crossing the grid line
+        #      would result in a minimal footprint node becoming a maximal footprint
+        class Event:
+            def __init__(self, coord: float, change: float):
+                self.coord = coord
+                self.change = change
+
+        # create events for both the horizontal and vertical axis
+        events_horizontal: List[Event] = []
+        events_vertical: List[Event] = []
+
+        for node in self._fixed_nodes:
+            bounding_box = node.getBoundingBox()
+
+            left = bounding_box.left - self._build_volume_bounding_box.left
+            right = bounding_box.right - self._build_volume_bounding_box.left
+            back = bounding_box.back - self._build_volume_bounding_box.back
+            front = bounding_box.front - self._build_volume_bounding_box.back
+
+            value_left = math.ceil(left / self._grid_width) * self._grid_width - left
+            value_right = math.ceil(right / self._grid_width) * self._grid_width - right
+            value_back = math.ceil(back / self._grid_height) * self._grid_height - back
+            value_front = math.ceil(front / self._grid_height) * self._grid_height - front
+
+            # give nodes a weight according to their size. This
+            # weight is heuristically chosen to be proportional to
+            # the number of grid squares the node-boundary occupies
+            weight = bounding_box.width + bounding_box.depth
+
+            events_horizontal.append(Event(value_left, weight))
+            events_horizontal.append(Event(value_right, -weight))
+            events_vertical.append(Event(value_back, weight))
+            events_vertical.append(Event(value_front, -weight))
+
+        events_horizontal.sort(key=lambda event: event.coord)
+        events_vertical.sort(key=lambda event: event.coord)
+
+        def findOptimalShiftAxis(events: List[Event], interval: float) -> float:
+            # executing the actual scanline algorithm
+            # iteratively go through events (left to right) and keep track of the
+            # current footprint. The optimal location is the one with the minimal
+            # footprint. If there are multiple locations with the same minimal
+            # footprint, the optimal location is the one with the largest range
+            # between the left and right endpoint of the footprint.
+            prev_offset = events[-1].coord - interval
+            current_minimal_footprint_count = 0
+
+            best_minimal_footprint_count = float('inf')
+            best_offset_span = float('-inf')
+            best_offset = 0.0
+
+            for event in events:
+                offset_span = event.coord - prev_offset
+
+                if current_minimal_footprint_count < best_minimal_footprint_count or (
+                        current_minimal_footprint_count == best_minimal_footprint_count and offset_span > best_offset_span):
+                    best_minimal_footprint_count = current_minimal_footprint_count
+                    best_offset_span = offset_span
+                    best_offset = event.coord
+
+                current_minimal_footprint_count += event.change
+                prev_offset = event.coord
+
+            return best_offset - best_offset_span * 0.5
+
+        center_grid_x = 0.5 * self._grid_width
+        center_grid_y = 0.5 * self._grid_height
+
+        optimal_center_x = self._grid_width - findOptimalShiftAxis(events_horizontal, self._grid_width)
+        optimal_center_y = self._grid_height - findOptimalShiftAxis(events_vertical, self._grid_height)
+
+        self._offset_x = optimal_center_x - center_grid_x
+        self._offset_y = optimal_center_y - center_grid_y
+
+    def _moveNodeOnGrid(self, node: "SceneNode", grid_x: int, grid_y: int) -> TranslateOperation:
+        coord_grid_x, coord_grid_y = self._gridSpaceToCoordSpace(grid_x, grid_y)
+        center_grid_x = coord_grid_x + (0.5 * self._grid_width)
+        center_grid_y = coord_grid_y + (0.5 * self._grid_height)
+
+        bounding_box = node.getBoundingBox()
+        center_node_x = (bounding_box.left + bounding_box.right) * 0.5
+        center_node_y = (bounding_box.back + bounding_box.front) * 0.5
+
+        delta_x = center_grid_x - center_node_x
+        delta_y = center_grid_y - center_node_y
+
+        return TranslateOperation(node, Vector(delta_x, 0, delta_y))
+
+    def _getGridCornerPoints(self, bounding_box: "BoundingVolume") -> Tuple[float, float, float, float]:
+        coord_x1 = bounding_box.left
+        coord_x2 = bounding_box.right
+        coord_y1 = bounding_box.back
+        coord_y2 = bounding_box.front
+        grid_x1, grid_y1 = self._coordSpaceToGridSpace(coord_x1, coord_y1)
+        grid_x2, grid_y2 = self._coordSpaceToGridSpace(coord_x2, coord_y2)
+        return grid_x1, grid_y1, grid_x2, grid_y2
+
+    def _getIntersectingGridIdForPolygon(self, polygon) -> Set[Tuple[int, int]]:
+        #       (x0, y0)
+        #       |
+        #       v
+        #       ┌─────────────┐
+        #       │             │
+        #       │             │
+        #       └─────────────┘  < (x1, y1)
+        x0 = float('inf')
+        y0 = float('inf')
+        x1 = float('-inf')
+        y1 = float('-inf')
+        grid_idx = set()
+        for [x, y] in polygon.getPoints():
+            x0 = min(x0, x)
+            y0 = min(y0, y)
+            x1 = max(x1, x)
+            y1 = max(y1, y)
+        grid_x1, grid_y1 = self._coordSpaceToGridSpace(x0, y0)
+        grid_x2, grid_y2 = self._coordSpaceToGridSpace(x1, y1)
+
+        for grid_x in range(math.floor(grid_x1), math.ceil(grid_x2)):
+            for grid_y in range(math.floor(grid_y1), math.ceil(grid_y2)):
+                grid_idx.add((grid_x, grid_y))
+        return grid_idx
+
+    def _intersectingGridIdxInclusive(self, bounding_box: "BoundingVolume") -> Set[Tuple[int, int]]:
+        grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(bounding_box)
+        grid_idx = set()
+        for grid_x in range(math.floor(grid_x1), math.ceil(grid_x2)):
+            for grid_y in range(math.floor(grid_y1), math.ceil(grid_y2)):
+                grid_idx.add((grid_x, grid_y))
+        return grid_idx
+
+    def _intersectingGridIdxExclusive(self, bounding_box: "BoundingVolume") -> Set[Tuple[int, int]]:
+        grid_x1, grid_y1, grid_x2, grid_y2 = self._getGridCornerPoints(bounding_box)
+        grid_idx = set()
+        for grid_x in range(math.ceil(grid_x1), math.floor(grid_x2)):
+            for grid_y in range(math.ceil(grid_y1), math.floor(grid_y2)):
+                grid_idx.add((grid_x, grid_y))
+        return grid_idx
+
+    def _gridSpaceToCoordSpace(self, x: float, y: float) -> Tuple[float, float]:
+        grid_x = x * self._grid_width + self._build_volume_bounding_box.left + self._offset_x
+        grid_y = y * self._grid_height + self._build_volume_bounding_box.back + self._offset_y
+        return grid_x, grid_y
+
+    def _coordSpaceToGridSpace(self, grid_x: float, grid_y: float) -> Tuple[float, float]:
+        coord_x = (grid_x - self._build_volume_bounding_box.left - self._offset_x) / self._grid_width
+        coord_y = (grid_y - self._build_volume_bounding_box.back - self._offset_y) / self._grid_height
+        return coord_x, coord_y
+
+    def _checkGridUnderDiscSpace(self, grid_x: int, grid_y: int) -> bool:
+        left, back = self._gridSpaceToCoordSpace(grid_x, grid_y)
+        right, front = self._gridSpaceToCoordSpace(grid_x + 1, grid_y + 1)
+        corners = [(left, back), (right, back), (right, front), (left, front)]
+        return all([self._checkPointUnderDiscSpace(x, y) for x, y in corners])
+
+    def _checkPointUnderDiscSpace(self, x: float, y: float) -> bool:
+        disc_x, disc_y = self._coordSpaceToDiscSpace(x, y)
+        distance_to_center_squared = disc_x ** 2 + disc_y ** 2
+        return distance_to_center_squared <= 1.0
+
+    def _coordSpaceToDiscSpace(self, x: float, y: float) -> Tuple[float, float]:
+        # Transform coordinate system to
+        #
+        #       coord_build_plate_left = -1
+        #       |               coord_build_plate_right = 1
+        #       v     (0,1)     v
+        #       ┌───────┬───────┐  < coord_build_plate_back = -1
+        #       │       │       │
+        #       │       │(0,0)  │
+        # (-1,0)├───────o───────┤(1,0)
+        #       │       │       │
+        #       │       │       │
+        #       └───────┴───────┘  < coord_build_plate_front = +1
+        #             (0,-1)
+        disc_x = ((x - self._build_volume_bounding_box.left) / self._build_volume_bounding_box.width) * 2.0 - 1.0
+        disc_y = ((y - self._build_volume_bounding_box.back) / self._build_volume_bounding_box.depth) * 2.0 - 1.0
+        return disc_x, disc_y
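The footprint/scanline reasoning in _findOptimalGridOffset can be hard to follow from the comments alone. Below is a standalone, simplified 1D illustration of the same idea, assuming nothing beyond the comments above: it is unweighted and brute-forces candidate offsets instead of sweeping event coordinates, so it is not the code GridArrange runs, but it shows what "maximising the number of minimal-footprint nodes" means.

    import math
    from typing import List, Tuple

    def minimal_span_count(extents: List[Tuple[float, float]], grid: float, offset: float) -> int:
        """How many extents cover only the minimal possible number of grid cells at this offset."""
        count = 0
        for left, right in extents:
            first_cell = math.floor((left - offset) / grid)
            last_cell = math.floor((right - offset) / grid)
            minimal = math.ceil((right - left) / grid)
            if last_cell - first_cell + 1 <= minimal:
                count += 1
        return count

    def best_offset(extents: List[Tuple[float, float]], grid: float, samples: int = 1000) -> float:
        # Brute-force sweep over offsets in [0, grid); the event-based scanline above avoids
        # sampling by only visiting the offsets where the count can change (and additionally
        # weights nodes by size).
        return max((i * grid / samples for i in range(samples)),
                   key=lambda offset: minimal_span_count(extents, grid, offset))

    if __name__ == "__main__":
        fixed = [(3.0, 18.0), (22.0, 41.0), (55.0, 68.0)]  # 1D extents of fixed objects, in mm
        print(best_offset(fixed, grid=20.0))  # offset that keeps the most objects in a minimal footprint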

Some files were not shown because too many files changed in this diff