Skip to content

Commit

Permalink
AN-296 Upgrade Werkzeug to 3.0.6 (#796)
Browse files Browse the repository at this point in the history
* Update packages

* Don't upgrade greenlet

* Remove special pyyaml handling

* Updates for new connexion version

* Additional updates

* Run with uvicorn for connexion 3

* Play nice with Connexion 3

* Add uvicorn for non-dev

* Make linter happy

* Make linter happier

* Update test app creation

* Doc fix

* Don't unnecessarily confuse us with stack traces

* Update tests to not use flask_testing

* Fix test

* Remove unused pip constraint

* Remove unused imports

* Linting

* Finish deleting constraints

* Reorder API endpoints to make Starlette happy

* Add comment warning about API endpoint ordering
  • Loading branch information
jgainerdewar authored Jan 2, 2025
1 parent d68116f commit 11a04df
Show file tree
Hide file tree
Showing 16 changed files with 199 additions and 229 deletions.
167 changes: 87 additions & 80 deletions api/jobs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,13 @@ enum: &TIMEFRAME
- DAYS_30
- ALL_TIME

# NOTE!!!
# Due to the way the Connexion library handles this file, the order in which
# endpoints are defined is important. More narrowly-defined URL paths must be
# listed before wider ones. For example, it's important that /jobs/operationDetails
# is earlier than /jobs/{id} in the below definition. When this is done incorrectly,
# we get confusing errors about missing or incorrect parameters.

paths:
'/capabilities':
get:
Expand All @@ -33,60 +40,56 @@ paths:
tags:
- Capabilities

'/jobs/{id}/abort':
post:
operationId: AbortJob
summary: Abort a job by ID
'/jobs/operationDetails':
get:
operationId: GetOperationDetails
summary: Call for operation details from the Google Pipelines API
parameters:
- name: id
description: Job ID
- name: job
description: job ID
required: true
in: query
type: string
in: path
- name: operation
description: operation ID
type: string
in: query
responses:
'200':
description: Success
'400':
$ref: '#/responses/BadRequest'
'401':
$ref: '#/responses/Unauthorized'
schema:
$ref: '#/definitions/JobOperationResponse'
'404':
$ref: '#/responses/NotFound'
'412':
$ref: '#/responses/JobTerminal'
'500':
$ref: '#/responses/ServerError'
tags:
- Jobs

'/jobs/{id}/updateLabels':
post:
operationId: UpdateJobLabels
summary: Update labels on a job.
'/jobs/tailFile':
get:
operationId: TailFileContents
summary: Get up to a certain amount (from the end) of content from the Google Storage API
parameters:
- name: id
description: Job ID
- name: bucket
description: Google bucket ID
required: true
in: query
type: string
in: path
- name: body
- name: object
description: ID of the file stored in the Google bucket
required: true
in: body
schema:
$ref: '#/definitions/UpdateJobLabelsRequest'
in: query
type: string
responses:
'200':
description: Success
schema:
$ref: '#/definitions/UpdateJobLabelsResponse'
'400':
$ref: '#/responses/BadRequest'
$ref: '#/definitions/FileContents'
'404':
$ref: '#/responses/NotFound'
'500':
$ref: '#/responses/ServerError'
'501':
description: Server does not implement this method.
tags:
- Jobs

Expand Down Expand Up @@ -117,10 +120,10 @@ paths:
tags:
- Jobs

'/jobs/{id}':
get:
operationId: GetJob
summary: Query for job and task-level metadata for a specified job
'/jobs/{id}/abort':
post:
operationId: AbortJob
summary: Abort a job by ID
parameters:
- name: id
description: Job ID
Expand All @@ -130,75 +133,65 @@ paths:
responses:
'200':
description: Success
schema:
$ref: '#/definitions/JobMetadataResponse'
'400':
$ref: '#/responses/BadRequest'
'401':
$ref: '#/responses/Unauthorized'
'404':
$ref: '#/responses/NotFound'
'412':
$ref: '#/responses/JobTerminal'
'500':
$ref: '#/responses/ServerError'
tags:
- Jobs

'/jobs/{id}/{task}/attempts':
get:
operationId: GetTaskAttempts
summary: Query for task-level metadata for a specified job
'/jobs/{id}/updateLabels':
post:
operationId: UpdateJobLabels
summary: Update labels on a job.
parameters:
- name: id
description: Job ID
required: true
type: string
in: path
- name: task
description: task name
- name: body
required: true
type: string
in: path
in: body
schema:
$ref: '#/definitions/UpdateJobLabelsRequest'
responses:
'200':
description: Success
schema:
$ref: '#/definitions/JobAttemptsResponse'
$ref: '#/definitions/UpdateJobLabelsResponse'
'400':
$ref: '#/responses/BadRequest'
'401':
$ref: '#/responses/Unauthorized'
'404':
$ref: '#/responses/NotFound'
'500':
$ref: '#/responses/ServerError'
'501':
description: Server does not implement this method.
tags:
- Jobs

'/jobs/{id}/{task}/{index}/attempts':
'/jobs/{id}':
get:
operationId: GetShardAttempts
summary: Query for shard-level metadata for a specified job
operationId: GetJob
summary: Query for job and task-level metadata for a specified job
parameters:
- name: id
description: Job ID
required: true
type: string
in: path
- name: task
description: task name
required: true
type: string
in: path
- name: index
description: shard index
required: true
type: string
in: path
responses:
'200':
description: Success
schema:
$ref: '#/definitions/JobAttemptsResponse'
$ref: '#/definitions/JobMetadataResponse'
'400':
$ref: '#/responses/BadRequest'
'401':
Expand All @@ -210,52 +203,66 @@ paths:
tags:
- Jobs

'/jobs/operationDetails':
'/jobs/{id}/{task}/attempts':
get:
operationId: GetOperationDetails
summary: Call for operation details from the Google Pipelines API
operationId: GetTaskAttempts
summary: Query for task-level metadata for a specified job
parameters:
- name: job
description: job ID
- name: id
description: Job ID
required: true
in: query
type: string
- name: operation
description: operation ID
in: path
- name: task
description: task name
required: true
type: string
in: query
in: path
responses:
'200':
description: Success
schema:
$ref: '#/definitions/JobOperationResponse'
$ref: '#/definitions/JobAttemptsResponse'
'400':
$ref: '#/responses/BadRequest'
'401':
$ref: '#/responses/Unauthorized'
'404':
$ref: '#/responses/NotFound'
'500':
$ref: '#/responses/ServerError'
tags:
- Jobs

'/jobs/tailFile':
'/jobs/{id}/{task}/{index}/attempts':
get:
operationId: TailFileContents
summary: Get up to a certain amount (from the end) of content from the Google Storage API
operationId: GetShardAttempts
summary: Query for shard-level metadata for a specified job
parameters:
- name: bucket
description: Google bucket ID
- name: id
description: Job ID
required: true
in: query
type: string
- name: object
description: ID of the file stored in the Google bucket
in: path
- name: task
description: task name
required: true
type: string
in: path
- name: index
description: shard index
required: true
in: query
type: string
in: path
responses:
'200':
description: Success
schema:
$ref: '#/definitions/FileContents'
$ref: '#/definitions/JobAttemptsResponse'
'400':
$ref: '#/responses/BadRequest'
'401':
$ref: '#/responses/Unauthorized'
'404':
$ref: '#/responses/NotFound'
'500':
Expand Down
9 changes: 1 addition & 8 deletions servers/cromwell/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,10 @@ WORKDIR /app
COPY --from=0 /job-manager/servers/jm_utils /app/jm_utils
COPY --from=0 /job-manager/servers/cromwell/jobs /app/jobs
COPY ./servers/cromwell/ /app/jobs
# Below is a link explaining where the individual PyYAML install command comes from
# https://github.com/yaml/pyyaml/issues/736#issuecomment-1653209769
# In short, due to Cython 3 being released, PyYAML needs to have its Cython dependency constrained, otherwise it will fail to install due to deprecated features
# However installation of PyYAML uses a "wheel", which is basically a pre-compiled version of the package
# This is problematic for requirements.txt as it cannot specify a wheel, only a source package
# So we need to install PyYAML separately with the constraint defined in constraints.txt
RUN cd jobs && PIP_CONSTRAINT=constraints.txt pip install PyYAML==5.4.1
RUN cd jobs && pip install -r requirements.txt
# We installed jm_utils so don't need local copy anymore, which breaks imports
RUN rm -rf jm_utils

# Missing required arguments -b PORT, -e ... which must be provided by the
# docker image user.
ENTRYPOINT ["gunicorn", "jobs:run()"]
ENTRYPOINT ["gunicorn", "-k uvicorn.workers.UvicornWorker", "jobs:run()"]
10 changes: 1 addition & 9 deletions servers/cromwell/Dockerfile.dev
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,10 @@ WORKDIR /app
ADD servers/jm_utils /app/jm_utils
ADD servers/cromwell/jobs /app/jobs
COPY servers/cromwell/requirements.txt /app/jobs
COPY servers/cromwell/constraints.txt /app/jobs
# Below is a link explaining where the individual PyYAML install command comes from
# https://github.com/yaml/pyyaml/issues/736#issuecomment-1653209769
# In short, due to Cython 3 being released, PyYAML needs to have its Cython dependency constrained, otherwise it will fail to install due to deprecated features
# However installation of PyYAML uses a "wheel", which is basically a pre-compiled version of the package
# This is problematic for requirements.txt as it cannot specify a wheel, only a source package
# So we need to install PyYAML separately with the constraint defined in constraints.txt
RUN cd jobs && PIP_CONSTRAINT=constraints.txt pip install PyYAML==5.4.1
RUN cd jobs && pip install -r requirements.txt
# We installed jm_utils so don't need local copy anymore, which breaks imports
RUN rm -rf jm_utils

# Missing required arguments -b PORT, -e ... which must be provided by the
# docker image user.
ENTRYPOINT ["/bin/bash", "/scripts/await_md5_match.sh", "/app/jobs/models/.jobs.yaml.md5", "--", "gunicorn", "jobs:run()"]
ENTRYPOINT ["/bin/bash", "/scripts/await_md5_match.sh", "/app/jobs/models/.jobs.yaml.md5", "--", "gunicorn", "-k uvicorn.workers.UvicornWorker", "jobs:run()"]
2 changes: 1 addition & 1 deletion servers/cromwell/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -369,5 +369,5 @@ To run unit and integration tests on the python-flask app, install
[`tox`](https://github.com/tox-dev/tox).
```
cd servers/cromwell
tox -- -s
tox -- -s .
```
1 change: 0 additions & 1 deletion servers/cromwell/constraints.txt

This file was deleted.

20 changes: 13 additions & 7 deletions servers/cromwell/jobs/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from distutils.util import strtobool

import connexion
from connexion.jsonifier import Jsonifier
import requests
from requests.auth import HTTPBasicAuth

Expand Down Expand Up @@ -54,8 +55,12 @@
# Allow unknown args if we aren't the main program, these include flags to
# gunicorn.
args, _ = parser.parse_known_args()
options = {"swagger_ui": False}
app = connexion.App(__name__, specification_dir='./swagger/', options=options)

options = connexion.options.SwaggerUIOptions(swagger_ui=False)
app = connexion.App(__name__,
specification_dir='./swagger/',
swagger_ui_options=options,
jsonifier=Jsonifier(cls=JSONEncoder))
DEFAULT_CROMWELL_CREDENTIALS = {'cromwell_user': '', 'cromwell_password': ''}

# Load credentials for cromwell
Expand Down Expand Up @@ -95,11 +100,11 @@ def loadCapabilities(capabilities_path):
capabilities_config)
return app.app.config['capabilities']
except IOError as io_err:
logger.exception(
logger.error(
'Failed to load capabilities config, using default display fields. %s',
io_err)
except TypeError as type_err:
logger.exception(
logger.error(
'Failed to load capabilities config, using default display fields. %s',
type_err)

Expand All @@ -109,8 +114,9 @@ def loadCapabilities(capabilities_path):
app.app.config['sam_url'] = args.sam_url
app.app.config['use_caas'] = args.use_caas and args.use_caas.lower() == 'true'
app.app.config['include_subworkflows'] = args.include_subworkflows
app.app.json_encoder = JSONEncoder
app.add_api('swagger.yaml', base_path=args.path_prefix)
app.add_api('swagger.yaml',
base_path=args.path_prefix,
jsonifier=Jsonifier(cls=JSONEncoder))


def run():
Expand All @@ -132,4 +138,4 @@ def run():
logger.critical(err)
logger.critical('Failed to connect to Cromwell: {}'.format(
args.cromwell_url))
return app.app
return app
Loading

0 comments on commit 11a04df

Please sign in to comment.