File: //snap/google-cloud-cli/current/lib/surface/healthcare/dicom_stores/export/bq.yaml
- release_tracks: [ALPHA, BETA, GA]

  help_text:
    brief: Export a Cloud Healthcare API DICOM store to BigQuery.
    description: Export a Cloud Healthcare API DICOM store to BigQuery.
    examples: |
      To export the dicom-store ``test-dicom-store'' to the BigQuery table ``testtable'' in the dataset ``testdataset'', overwriting any existing table, run:

        $ {command} test-dicom-store --bq-table=bq://my-project.testdataset.testtable --dataset=test-dataset --write-disposition=write-truncate

      To export the dicom-store ``test-dicom-store'' to the BigQuery table ``testtable'' in the dataset ``testdataset'', appending to any existing table, run:

        $ {command} test-dicom-store --bq-table=bq://my-project.testdataset.testtable --dataset=test-dataset --write-disposition=write-append

  request:
    collection: healthcare.projects.locations.datasets.dicomStores
    method: export
    ALPHA:
      api_version: v1alpha2
    BETA:
      api_version: v1beta1
    GA:
      api_version: v1

  arguments:
    resource:
      help_text: Cloud Healthcare API DICOM store to export.
      spec: !REF googlecloudsdk.command_lib.healthcare.resources:dicom_store

    params:
    - arg_name: bq-table
      api_field: exportDicomDataRequest.bigqueryDestination.tableUri
      required: true
      help_text: |
        The BigQuery table to which the DICOM store should
        be written. If this table does not exist, a new table with the given
        name will be created.
    - arg_name: overwrite-table
      api_field: exportDicomDataRequest.bigqueryDestination.force
      default: null
      help_text: |
        If the destination table already exists and this flag is
        `TRUE`, the table will be overwritten by the contents of the DICOM
        store. If the flag is not set and the destination table already exists,
        the export call returns an error.
    - arg_name: write-disposition
      api_field: exportDicomDataRequest.bigqueryDestination.writeDisposition
      help_text: |
        Determines whether the existing table in the destination is to be overwritten or appended to.
      type: str
      choices:
      - arg_value: write-empty
        enum_value: WRITE_EMPTY
        help_text: |
          Only export data if the destination table is empty.
      - arg_value: write-truncate
        enum_value: WRITE_TRUNCATE
        help_text: |
          Erase all existing data in a table before writing the instances.
      - arg_value: write-append
        enum_value: WRITE_APPEND
        help_text: |
          Append data to the existing table.
    - arg_name: filter-config-gcs-uri
      release_tracks: [ALPHA, BETA]
      api_field: exportDicomDataRequest.filterConfig.resourcePathsGcsUri
      help_text: |
        Cloud Storage location of the filter configuration file. The FILTER_CONFIG_GCS_URI must
        be in the format gs://bucket/path/to/object. The filter configuration file must contain a
        list of resource paths separated by newline characters (\n or \r\n). Each resource path must
        be in the format "/studies/{studyUID}[/series/{seriesUID}[/instances/{instanceUID}]]".

        The Cloud Healthcare API service account must have the roles/storage.objectViewer
        Cloud IAM role for this Cloud Storage location.
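    # Illustrative only: a filter configuration file referenced by
    # --filter-config-gcs-uri lists one resource path per line in the format
    # described above (the study/series/instance UIDs below are hypothetical):
    #
    #   /studies/1.2.840.1
    #   /studies/1.2.840.2/series/1.2.840.2.1
    #   /studies/1.2.840.2/series/1.2.840.2.1/instances/1.2.840.2.1.1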
  async:
    collection: healthcare.projects.locations.datasets.operations
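
# Illustrative only: a BETA-track invocation of the command this spec defines,
# combining the flags documented above (all resource names and values are
# hypothetical; --filter-config-gcs-uri exists only in the ALPHA and BETA tracks):
#
#   $ gcloud beta healthcare dicom-stores export bq test-dicom-store \
#       --dataset=test-dataset \
#       --bq-table=bq://my-project.testdataset.testtable \
#       --write-disposition=write-truncate \
#       --filter-config-gcs-uri=gs://my-bucket/filter-config.txt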