[prism] Support interval windows/prep for custom windows. #1978

GitHub Actions / Test Results failed Aug 15, 2024 in 0s

4 fail, 52 skipped, 82 pass in 1h 30m 51s

138 tests in 1 suite (1 file), 1h 30m 51s ⏱️: 82 ✅ passed, 52 💤 skipped, 4 ❌ failed

Results for commit c197e4f.
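
All four failures below share one root cause: Dataflow job creation in apache-beam-testing was rejected because the jobs-per-project quota was exhausted (300 jobs already running), so no pipeline code ever executed. As a hedged illustration of the "check the quota usage" advice in the error messages, the sketch below counts active Dataflow jobs via the gcloud CLI; the project and region values come from the logs, and the helper itself is hypothetical, not part of the test suite.

    import json
    import subprocess

    def count_active_dataflow_jobs(project: str, region: str) -> int:
        # Shell out to gcloud (assumed installed and authenticated) and count
        # the jobs currently in an active state in the given project/region.
        out = subprocess.run(
            ["gcloud", "dataflow", "jobs", "list",
             "--project", project, "--region", region,
             "--status", "active", "--format", "json"],
            check=True, capture_output=True, text=True).stdout
        return len(json.loads(out))

    # Values taken from the error messages; the quota error fires at 300 jobs.
    print(count_active_dataflow_jobs("apache-beam-testing", "us-central1"))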

Annotations

test_iobase_source_with_row_restriction (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests) failed

sdks/python/pytest_postCommitIT-df-py39.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:01 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(59c1a26627f492c9): The workflow could not be created. Causes: (bf11c22a1d6918aa): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_row_restriction>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_row_restriction(self):
      EXPECTED_TABLE_DATA = [{
          'number': 1,
          'string': '你好',
          'time': datetime.time(12, 44, 31),
          'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
          'rec': None
      }]
      with beam.Pipeline(argv=self.args) as p:
        result = (
            p | 'Read with BigQuery Storage API' >> beam.io.ReadFromBigQuery(
                method=beam.io.ReadFromBigQuery.Method.DIRECT_READ,
                table=self.temp_table_reference,
                row_restriction='number < 2',
                use_native_datetime=True))
>       assert_that(result, equal_to(EXPECTED_TABLE_DATA))

apache_beam/io/gcp/bigquery_read_it_test.py:514: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:725: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:831: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7fbb265bdfa0>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 1...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20240815192200104693-3018'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:01 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(59c1a26627f492c9): The workflow could not be created. Causes: (bf11c22a1d6918aa): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
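
Note that the stack above passes twice through apache_beam/utils/retry.py, yet the 400 still propagates: Beam's default retry filter targets transient server errors, while this quota failure surfaces as an HttpBadRequestError. As a minimal sketch (illustrative only, not what the SDK actually does), a custom filter could make the submission retry quota errors as well:

    from apache_beam.utils import retry

    def retry_on_quota_errors_filter(exception):
        # Hypothetical filter: keep the default server-error behavior, but
        # also retry the jobs-per-project quota error seen in this log.
        return (retry.retry_on_server_errors_filter(exception) or
                'jobs-per-project quota' in str(exception))

    @retry.with_exponential_backoff(
        num_retries=5, retry_filter=retry_on_quota_errors_filter)
    def create_job_with_retries(client, request):
        # Stand-in for the Create call at the bottom of the stack above.
        return client.projects_locations_jobs.Create(request)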

test_datastore_wordcount_it (apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT) failed

sdks/python/pytest_postCommitIT-df-py39.xml [took 5s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:06 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(773a6eb683d79bb9): The workflow could not be created. Causes: (36ec8ecbdeb934bd): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT testMethod=test_datastore_wordcount_it>

    @pytest.mark.it_postcommit
    def test_datastore_wordcount_it(self):
      test_pipeline = TestPipeline(is_integration_test=True)
      kind = self.DATASTORE_WORDCOUNT_KIND
      output = '/'.join([
          test_pipeline.get_option('output'),
          str(int(time.time() * 1000)),
          'datastore_wordcount_results'
      ])
    
      arg_sleep_secs = test_pipeline.get_option('sleep_secs')
      sleep_secs = int(arg_sleep_secs) if arg_sleep_secs is not None else None
      pipeline_verifiers = [
          PipelineStateMatcher(),
          FileChecksumMatcher(
              output + '*-of-*', self.EXPECTED_CHECKSUM, sleep_secs)
      ]
      extra_opts = {
          'kind': kind,
          'output': output,
          # Comment this out to regenerate input data on Datastore (delete
          # existing data first using the bulk delete Dataflow template).
          'read_only': True,
          'on_success_matcher': all_of(*pipeline_verifiers)
      }
    
>     datastore_wordcount.run(
          test_pipeline.get_full_options_as_args(**extra_opts))

apache_beam/examples/cookbook/datastore_wordcount_it_test.py:71: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/cookbook/datastore_wordcount.py:246: in run
    result = read_from_datastore(project, known_args, pipeline_options)
apache_beam/examples/cookbook/datastore_wordcount.py:190: in read_from_datastore
    result = p.run()
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:725: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:831: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ecc3998fdf0>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 1...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20240815192205310139-9033'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:06 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(773a6eb683d79bb9): The workflow could not be created. Causes: (36ec8ecbdeb934bd): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
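
Unlike the BigQuery tests, this test wires its verification into the options it submits: get_full_options_as_args injects on_success_matcher, and the test runner evaluates those matchers (pipeline reached a terminal success state, output files match the expected checksum) after the job finishes. A stripped-down sketch of that wiring, using only the pieces visible in the excerpt above:

    from hamcrest import all_of
    from apache_beam.testing.pipeline_verifiers import PipelineStateMatcher
    from apache_beam.testing.test_pipeline import TestPipeline

    # Sketch: matchers passed via 'on_success_matcher' are evaluated once the
    # integration-test job completes.
    test_pipeline = TestPipeline(is_integration_test=True)
    args = test_pipeline.get_full_options_as_args(
        on_success_matcher=all_of(PipelineStateMatcher()))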

test_iobase_source_with_very_selective_filters (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests) failed

sdks/python/pytest_postCommitIT-df-py39.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:05 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(bc7c4b645586175b): The workflow could not be created. Causes: (18537d3cce59c1e4): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_very_selective_filters>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_very_selective_filters(self):
      with beam.Pipeline(argv=self.args) as p:
        result = (
            p | 'Read with BigQuery Storage API' >> beam.io.ReadFromBigQuery(
                method=beam.io.ReadFromBigQuery.Method.DIRECT_READ,
                project=self.temp_table_reference.projectId,
                dataset=self.temp_table_reference.datasetId,
                table=self.temp_table_reference.tableId,
                row_restriction='number > 4',
                selected_fields=['string']))
>       assert_that(result, equal_to([]))

apache_beam/io/gcp/bigquery_read_it_test.py:552: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:725: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:831: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7fbb26465070>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 1...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20240815192204044685-2323'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:05 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(bc7c4b645586175b): The workflow could not be created. Causes: (18537d3cce59c1e4): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests) failed

sdks/python/pytest_postCommitIT-df-py39.xml [took 4s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:10 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(382adfc1ece233ab): The workflow could not be created. Causes: (df5b1643dca50828): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests testMethod=test_iobase_source>

    @pytest.mark.it_postcommit
    def test_iobase_source(self):
      with beam.Pipeline(argv=self.args) as p:
        result = (
            p
            | 'read' >> beam.io.ReadFromBigQuery(
                query=self.query,
                use_standard_sql=True,
                project=self.project,
                bigquery_job_labels={'launcher': 'apache_beam_tests'})
            | beam.Map(datetime_to_utc))
>       assert_that(result, equal_to(self.get_expected_data(native=False)))

apache_beam/io/gcp/bigquery_read_it_test.py:722: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:613: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:560: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:587: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:725: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:831: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7fbb4686ddf0>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 1...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20240815192208911794-6809'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Thu, 15 Aug 2024 19:22:10 GMT', 'server': 'ESF', 'cache-control': 'private', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(382adfc1ece233ab): The workflow could not be created. Causes: (df5b1643dca50828): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/-1734967050/lib/python3.9/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
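
All four tracebacks end in the same apitools check: __ProcessHttpResponse raises HttpError.FromResponse for any status outside 200/201/204, which for this 400 materializes as HttpBadRequestError. A hedged sketch of how a caller could separate this capacity failure from a genuine bad request (the helper and the string match are assumptions, not existing Beam code):

    from apitools.base.py import exceptions

    def run_or_flag_quota(pipeline):
        # Re-raise quota exhaustion with a clearer message; let real 400s through.
        try:
            return pipeline.run()
        except exceptions.HttpBadRequestError as e:
            if 'jobs-per-project quota' in str(e):
                raise RuntimeError(
                    'Dataflow job quota exhausted in the test project; '
                    'rerun once running jobs drain.') from e
            raise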

52 skipped tests found

There are 52 skipped tests, see "Raw output" for the full list of skipped tests.
Raw output
apache_beam.examples.inference.tfx_bsl.tfx_bsl_inference_it_test.TFXRunInferenceTests ‑ test_tfx_run_inference_mobilenetv2
apache_beam.examples.ml_transform.ml_transform_it_test
apache_beam.examples.snippets.transforms.elementwise.mltransform_test
apache_beam.examples.snippets.transforms.elementwise.runinference_test
apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT ‑ test_streaming_wordcount_debugging_it
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming_with_copy_jobs
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming_with_dynamic_destinations
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_schema_autodetect
apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT ‑ test_analyzing_syntax
apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT ‑ test_label_detection_with_video_context
apache_beam.ml.inference.base_test.RunInferenceBaseTest ‑ test_run_inference_with_side_inputin_streaming
apache_beam.ml.inference.huggingface_inference_it_test
apache_beam.ml.inference.huggingface_inference_test
apache_beam.ml.inference.onnx_inference_it_test.OnnxInference ‑ test_onnx_run_inference_roberta_sentiment_classification
apache_beam.ml.inference.onnx_inference_test
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_bert_for_masked_lm
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_bert_for_masked_lm_large_model
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_coco_maskrcnn_resnet50_fpn
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_coco_maskrcnn_resnet50_fpn_v1_and_v2
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_imagenet_mobilenetv2
apache_beam.ml.inference.pytorch_inference_test
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_imagenet_image_segmentation
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_classification
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_classification_large_model
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_with_weights_classification
apache_beam.ml.inference.tensorrt_inference_test
apache_beam.ml.inference.vertex_ai_inference_it_test
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_datatable_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_datatable_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_single_batch_large_model
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_pandas_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_pandas_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_scipy_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_scipy_single_batch
apache_beam.ml.inference.xgboost_inference_test
apache_beam.ml.transforms.handlers_test
apache_beam.ml.transforms.tft_test
apache_beam.runners.dask.dask_runner_test
apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest ‑ test_metrics_it
apache_beam.testing.analyzers.perf_analysis_test
apache_beam.testing.benchmarks.cloudml.cloudml_benchmark_test
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_basic_execution
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_multiple_outputs
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_multiple_outputs_with_watermark_advancement
apache_beam.transforms.enrichment_handlers.feast_feature_store_it_test
apache_beam.transforms.enrichment_handlers.feast_feature_store_test
apache_beam.transforms.periodicsequence_it_test.PeriodicSequenceIT ‑ test_periodicsequence_outputs_valid_watermarks_it
apache_beam.typehints.pytorch_type_compatibility_test
apache_beam.yaml.yaml_ml_test

138 tests found

There are 138 tests, see "Raw output" for the full list of tests.
Raw output
apache_beam.dataframe.io_it_test.ReadUsingReadGbqTests ‑ test_ReadGbq
apache_beam.dataframe.io_it_test.ReadUsingReadGbqTests ‑ test_ReadGbq_direct_read
apache_beam.dataframe.io_it_test.ReadUsingReadGbqTests ‑ test_ReadGbq_direct_read_with_project
apache_beam.dataframe.io_it_test.ReadUsingReadGbqTests ‑ test_ReadGbq_export_with_project
apache_beam.dataframe.io_it_test.ReadUsingReadGbqTests ‑ test_ReadGbq_with_computation
apache_beam.examples.complete.autocomplete_test.AutocompleteTest ‑ test_autocomplete_it
apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT ‑ test_game_stats_it
apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT ‑ test_hourly_team_score_it
apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT ‑ test_leader_board_it
apache_beam.examples.complete.game.user_score_it_test.UserScoreIT ‑ test_user_score_it
apache_beam.examples.complete.juliaset.juliaset.juliaset_test_it.JuliaSetTestIT ‑ test_run_example_with_setup_file
apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT ‑ test_bigquery_tornadoes_it
apache_beam.examples.cookbook.bigtableio_it_test.BigtableIOWriteTest ‑ test_bigtable_write
apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT ‑ test_datastore_wordcount_it
apache_beam.examples.dataframe.flight_delays_it_test.FlightDelaysTest ‑ test_flight_delays
apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT ‑ test_aggregation
apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT ‑ test_enrich
apache_beam.examples.fastavro_it_test.FastavroIT ‑ test_avro_it
apache_beam.examples.inference.tfx_bsl.tfx_bsl_inference_it_test.TFXRunInferenceTests ‑ test_tfx_run_inference_mobilenetv2
apache_beam.examples.ml_transform.ml_transform_it_test
apache_beam.examples.snippets.transforms.elementwise.mltransform_test
apache_beam.examples.snippets.transforms.elementwise.runinference_test
apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT ‑ test_streaming_wordcount_debugging_it
apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT ‑ test_streaming_wordcount_it
apache_beam.examples.wordcount_it_test.WordCountIT ‑ test_wordcount_impersonation_it
apache_beam.examples.wordcount_it_test.WordCountIT ‑ test_wordcount_it
apache_beam.io.fileio_test.MatchIntegrationTest ‑ test_transform_on_gcs
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_legacy_sql
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_new_types
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_new_types_avro
apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT ‑ test_big_query_standard_sql
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_batch_copy_jobs_with_no_input_schema
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming_with_copy_jobs
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_bqfl_streaming_with_dynamic_destinations
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_multiple_destinations_transform
apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT ‑ test_one_job_fails_all_jobs_fail
apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT ‑ test_bigquery_read_1M_python
apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT ‑ test_bigquery_read_custom_1M_python
apache_beam.io.gcp.bigquery_json_it_test.BigQueryJsonIT ‑ test_direct_read
apache_beam.io.gcp.bigquery_json_it_test.BigQueryJsonIT ‑ test_export_read
apache_beam.io.gcp.bigquery_json_it_test.BigQueryJsonIT ‑ test_file_loads_write
apache_beam.io.gcp.bigquery_json_it_test.BigQueryJsonIT ‑ test_query_read
apache_beam.io.gcp.bigquery_json_it_test.BigQueryJsonIT ‑ test_streaming_inserts
apache_beam.io.gcp.bigquery_read_it_test.ReadAllBQTests ‑ test_read_queries
apache_beam.io.gcp.bigquery_read_it_test.ReadInteractiveRunnerTests ‑ test_read_in_interactive_runner
apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadNewTypesTests ‑ test_native_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_native_source
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve_specifying_only_table
apache_beam.io.gcp.bigquery_read_it_test.ReadTests ‑ test_table_schema_retrieve_with_direct_read
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection_and_row_restriction
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_column_selection_and_row_restriction_rows
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_native_datetime
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_query
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_query_and_filters
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_row_restriction
apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests ‑ test_iobase_source_with_very_selective_filters
apache_beam.io.gcp.bigquery_test.BigQueryFileLoadsIntegrationTests ‑ test_avro_file_load
apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests ‑ test_multiple_destinations_transform
apache_beam.io.gcp.bigquery_test.BigQueryStreamingInsertTransformIntegrationTests ‑ test_value_provider_transform
apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT ‑ test_file_loads
apache_beam.io.gcp.bigquery_test.PubSubBigQueryIT ‑ test_streaming_inserts
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_insert_errors_reporting
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_insert_non_transient_api_call_error
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_new_types
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_schema_autodetect
apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests ‑ test_big_query_write_without_schema
apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT ‑ test_datastore_write_limit
apache_beam.io.gcp.gcsfilesystem_integration_test.GcsFileSystemIntegrationTest ‑ test_copy
apache_beam.io.gcp.gcsfilesystem_integration_test.GcsFileSystemIntegrationTest ‑ test_rename
apache_beam.io.gcp.gcsfilesystem_integration_test.GcsFileSystemIntegrationTest ‑ test_rename_error
apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest ‑ test_batch_copy_and_delete
apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest ‑ test_copy
apache_beam.io.gcp.gcsio_integration_test.GcsIOIntegrationTest ‑ test_create_default_bucket
apache_beam.io.gcp.healthcare.dicomio_integration_test.DICOMIoIntegrationTest ‑ test_dicom_search_instances
apache_beam.io.gcp.healthcare.dicomio_integration_test.DICOMIoIntegrationTest ‑ test_dicom_store_instance_from_gcs
apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest ‑ test_streaming_data_only
apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest ‑ test_streaming_with_attributes
apache_beam.io.parquetio_it_test.TestParquetIT ‑ test_parquetio_it
apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT ‑ test_deidentification
apache_beam.ml.gcp.cloud_dlp_it_test.CloudDLPIT ‑ test_inspection
apache_beam.ml.gcp.naturallanguageml_test_it.NaturalLanguageMlTestIT ‑ test_analyzing_syntax
apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT ‑ test_create_catalog_item
apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT ‑ test_create_user_event
apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT ‑ test_predict
apache_beam.ml.gcp.videointelligenceml_test_it.VideoIntelligenceMlTestIT ‑ test_label_detection_with_video_context
apache_beam.ml.gcp.visionml_test_it.VisionMlTestIT ‑ test_text_detection_with_language_hint
apache_beam.ml.inference.base_test.RunInferenceBaseTest ‑ test_run_inference_with_side_inputin_streaming
apache_beam.ml.inference.huggingface_inference_it_test
apache_beam.ml.inference.huggingface_inference_test
apache_beam.ml.inference.onnx_inference_it_test.OnnxInference ‑ test_onnx_run_inference_roberta_sentiment_classification
apache_beam.ml.inference.onnx_inference_test
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_bert_for_masked_lm
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_bert_for_masked_lm_large_model
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_coco_maskrcnn_resnet50_fpn
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_coco_maskrcnn_resnet50_fpn_v1_and_v2
apache_beam.ml.inference.pytorch_inference_it_test.PyTorchInference ‑ test_torch_run_inference_imagenet_mobilenetv2
apache_beam.ml.inference.pytorch_inference_test
apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference ‑ test_sklearn_mnist_classification
apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference ‑ test_sklearn_mnist_classification_large_model
apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference ‑ test_sklearn_regression
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_imagenet_image_segmentation
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_classification
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_classification_large_model
apache_beam.ml.inference.tensorflow_inference_it_test.TensorflowInference ‑ test_tf_mnist_with_weights_classification
apache_beam.ml.inference.tensorrt_inference_test
apache_beam.ml.inference.vertex_ai_inference_it_test
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_datatable_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_datatable_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_numpy_single_batch_large_model
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_pandas_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_pandas_single_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_scipy_multi_batch
apache_beam.ml.inference.xgboost_inference_it_test.XGBoostInference ‑ test_iris_classification_scipy_single_batch
apache_beam.ml.inference.xgboost_inference_test
apache_beam.ml.transforms.handlers_test
apache_beam.ml.transforms.tft_test
apache_beam.runners.dask.dask_runner_test
apache_beam.runners.dataflow.dataflow_exercise_metrics_pipeline_test.ExerciseMetricsPipelineTest ‑ test_metrics_it
apache_beam.testing.analyzers.perf_analysis_test
apache_beam.testing.benchmarks.cloudml.cloudml_benchmark_test
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_basic_execution
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_multiple_outputs
apache_beam.testing.test_stream_it_test.TestStreamIntegrationTests ‑ test_multiple_outputs_with_watermark_advancement
apache_beam.transforms.enrichment_handlers.feast_feature_store_it_test
apache_beam.transforms.enrichment_handlers.feast_feature_store_test
apache_beam.transforms.external_it_test.ExternalTransformIT ‑ test_job_python_from_python_it
apache_beam.transforms.periodicsequence_it_test.PeriodicSequenceIT ‑ test_periodicsequence_outputs_valid_watermarks_it
apache_beam.typehints.pytorch_type_compatibility_test
apache_beam.yaml.yaml_ml_test