# File: //snap/google-cloud-cli/396/platform/gsutil/third_party/charset_normalizer/.github/workflows/cd.yml
name: Continuous Delivery

on:
  workflow_dispatch:

  release:
    types:
      - created

permissions:
  contents: read
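# The workflow runs on manual dispatch or when a GitHub release is created.
# Token permissions default to read-only here; jobs that publish or sign
# request the extra scopes they need (id-token, contents) individually.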

jobs:
  pre_flight_check:
    name: Preflight Checks
    uses: ./.github/workflows/ci.yml
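  # pre_flight_check reuses the repository's CI workflow as a release gate:
  # the build jobs below depend on it, so nothing is packaged unless CI passes.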

  universal-wheel:
    name: Build Universal Wheel
    runs-on: ubuntu-latest
    needs:
      - pre_flight_check
    steps:
      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
      - name: Set up Python
        uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
        with:
          python-version: '3.11'
      - name: Update pip, setuptools, wheel and build
        run: |
          python -m pip install --upgrade pip
          python -m pip install setuptools wheel build
      - name: Build Wheel
        env:
          CHARSET_NORMALIZER_USE_MYPYC: '0'
        run: python -m build
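        # With CHARSET_NORMALIZER_USE_MYPYC set to '0', `python -m build` here
        # presumably produces the sdist and a pure-Python wheel (no mypyc
        # compilation); the compiled wheels come from the build-wheels job below.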
      - name: Upload artifacts
        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
        with:
          name: dist
          path: dist

  build-wheels:
    name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }}
    runs-on: ${{ matrix.os }}
    needs: pre_flight_check
    strategy:
      matrix:
        os: [ ubuntu-latest, windows-latest, macos-13 ]
        qemu: [ '' ]
        include:
          # Split the Ubuntu job by emulated architecture to speed things up
          - os: ubuntu-latest
            qemu: aarch64
          - os: ubuntu-latest
            qemu: ppc64le
          - os: ubuntu-latest
            qemu: s390x
    steps:
      - name: Checkout
        uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
        with:
          submodules: true
      - name: Set up QEMU
        if: ${{ matrix.qemu }}
        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
        with:
          platforms: all
        id: qemu
      - name: Prepare emulation
        run: |
          if [[ -n "${{ matrix.qemu }}" ]]; then
            # Build emulated architectures only if QEMU is set,
            # use default "auto" otherwise
            echo "CIBW_ARCHS_LINUX=${{ matrix.qemu }}" >> $GITHUB_ENV
          fi
        shell: bash
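      # CIBW_ARCHS_LINUX exported above restricts cibuildwheel to the single
      # emulated architecture for that matrix entry; native runs leave it unset
      # and fall back to cibuildwheel's default "auto" architecture selection.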
      - name: Setup Python
        uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Build wheels
        uses: pypa/cibuildwheel@f1859528322d7b29d4493ee241a167807661dfb4 # v2.21.2
        env:
          CIBW_BUILD_FRONTEND: "pip; args: --no-build-isolation"
          CIBW_ARCHS_MACOS: x86_64 arm64 universal2
          CIBW_ENVIRONMENT: CHARSET_NORMALIZER_USE_MYPYC='1'
          CIBW_BEFORE_BUILD: pip install -r build-requirements.txt
          CIBW_TEST_REQUIRES: pytest
          CIBW_TEST_COMMAND: pytest -c {package} {package}/tests
          CIBW_SKIP: pp* cp36*
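      # The CIBW_* variables above configure cibuildwheel: build-requirements.txt
      # is installed up front (the pip frontend runs with --no-build-isolation),
      # CHARSET_NORMALIZER_USE_MYPYC='1' is set in each build environment, every
      # wheel is smoke-tested with pytest, and PyPy plus CPython 3.6 are skipped.
      # macOS builds cover x86_64, arm64 and universal2.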
      - name: Upload artifacts
        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
        with:
          name: dist
          path: ./wheelhouse/*.whl

  checksum:
    name: Compute hashes
    runs-on: ubuntu-latest
    needs:
      - build-wheels
      - universal-wheel
    outputs:
      hashes: ${{ steps.compute.outputs.hashes }}
    steps:
      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
      - name: Download distributions
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: dist
          path: dist
      - name: Collected dists
        run: |
          tree dist
      - name: Generate hashes
        id: compute  # exposed to the provenance job as needs.checksum.outputs.hashes
        working-directory: ./dist
        run: echo "hashes=$(sha256sum * | base64 -w0)" >> $GITHUB_OUTPUT
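        # The hashes output is a base64-encoded `sha256sum` listing of every
        # file in dist/, in the format the SLSA generator below expects for
        # its base64-subjects input.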

  provenance:
    needs: checksum
    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0
    permissions:
      actions: read
      id-token: write
      contents: write
    with:
      base64-subjects: ${{ needs.checksum.outputs.hashes }}
      upload-assets: true
      compile-generator: true
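  # The provenance job above calls the pinned SLSA generic generator to produce
  # provenance for the hashed artifacts and upload it to the GitHub release
  # (upload-assets: true), which is why it requests the write scopes it does.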

  deploy:
    name: 🚀 Deploy to PyPI
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/')
    permissions:
      id-token: write
      contents: write
    needs: provenance
    environment:
      name: pypi
      url: https://pypi.org/project/charset-normalizer/
    steps:
      - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
      - name: Download distributions
        uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
        with:
          name: dist
          path: dist
      - name: Collected dists
        run: |
          tree dist
      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@f7600683efdcb7656dec5b29656edb7bc586e597 # release/v1
      - name: Upload dists to GitHub Release
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
        run: |
          gh release upload ${{ github.ref_name }} dist/* --repo ${{ github.repository }}
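      # Publication appears to rely on PyPI trusted publishing: the job grants
      # id-token: write and passes no credentials to gh-action-pypi-publish,
      # which then authenticates via OIDC. The final step attaches the dists
      # to the GitHub release using the built-in GITHUB_TOKEN.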