Skip to content

Commit

Permalink
Merge pull request #109 from lsst-sqre/u/jsickcodes/s3-presigned-post-v4
Browse files Browse the repository at this point in the history
Use AWS signature v4 for presigned POST URLs
  • Loading branch information
jonathansick authored Mar 25, 2022
2 parents 0a8e995 + 05426ab commit 6c80010
Show file tree
Hide file tree
Showing 29 changed files with 445 additions and 205 deletions.
2 changes: 1 addition & 1 deletion bin/install-base-packages.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ apt-get -y upgrade
# Install system packages
# - build-essential needed for uwsgi
# - git needed for setuptools_scm
apt-get -y install --no-install-recommends git build-essential redis-server dnsutils wget
apt-get -y install --no-install-recommends git build-essential redis-server dnsutils wget iputils-ping

# Delete cached files we don't need anymore:
apt-get clean
Expand Down
4 changes: 0 additions & 4 deletions bin/start-api.bash
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,6 @@

set -eu

echo $PATH
pwd
ls migrations

flask createdb migrations/alembic.ini
flask init
uwsgi uwsgi.ini
16 changes: 11 additions & 5 deletions keeper/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,9 @@
from keeper.models import Permission, User, db
from keeper.version import get_version

# from psycopg2.errors import UndefinedTable


if TYPE_CHECKING:
from flask import Flask

Expand Down Expand Up @@ -54,11 +57,14 @@ def createdb_command(alembicconf: str) -> None:
To migrate database servers, see the copydb sub-command.
"""
db.create_all()

# stamp tables with latest schema version
alembic_cfg = alembic.config.Config(alembicconf)
alembic.command.stamp(alembic_cfg, "head")
try:
User.query.get(1)
except Exception:
db.create_all()

# stamp tables with latest schema version
alembic_cfg = alembic.config.Config(alembicconf)
alembic.command.stamp(alembic_cfg, "head")


@click.command("init")
Expand Down
66 changes: 51 additions & 15 deletions keeper/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@
import logging
import os
import sys
from typing import TYPE_CHECKING, Dict, Optional, Type
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type

import structlog
from structlog.stdlib import add_log_level
from structlog.types import EventDict

from keeper.models import EditionKind

Expand Down Expand Up @@ -198,28 +200,62 @@ def init_app(cls, app: Flask) -> None:
stream_handler = logging.StreamHandler(stream=sys.stdout)
stream_handler.setFormatter(logging.Formatter("%(message)s"))
logger = logging.getLogger("keeper")
logger.handlers = []
logger.addHandler(stream_handler)
if logger.hasHandlers():
logger.handlers.clear()
logger.setLevel(logging.INFO)
logger.setLevel("INFO")

processors: List[Any] = [
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.UnicodeDecoder(),
]
# JSON-formatted logging
processors.append(add_log_severity)
processors.append(structlog.processors.format_exc_info)
processors.append(structlog.processors.JSONRenderer())

structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.processors.JSONRenderer(),
],
context_class=structlog.threadlocal.wrap_dict(dict),
processors=processors,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)


def add_log_severity(
    logger: logging.Logger, method_name: str, event_dict: EventDict
) -> EventDict:
    """Record the log level in the event dict under the ``severity`` key.

    A structlog processor equivalent to `structlog.stdlib.add_log_level`,
    except that the level is stored as ``severity`` rather than ``level``
    so that Google Log Explorer recognizes it when parsing structured log
    records.

    Parameters
    ----------
    logger : `logging.Logger`
        The wrapped logger object.
    method_name : `str`
        Name of the wrapped logger method (``"warning"`` or ``"error"``,
        for example).
    event_dict : `structlog.types.EventDict`
        The current context and event. Modified in place, as is the
        standard behavior for structlog processors.

    Returns
    -------
    event_dict : `structlog.types.EventDict`
        The same event dict, now containing the ``severity`` key.
    """
    # Reuse the stock processor on a throwaway dict so the level-name
    # mapping stays identical to structlog's own.
    probe = add_log_level(logger, method_name, {})
    event_dict["severity"] = probe["level"]
    return event_dict


config: Dict[str, Type[Config]] = {
"development": DevelopmentConfig,
"testing": TestConfig,
Expand Down
51 changes: 40 additions & 11 deletions keeper/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,17 @@ class Organization(db.Model): # type: ignore
aws_encrypted_secret_key = db.Column(db.LargeBinary, nullable=True)
"""The AWS secret key."""

# FIXME nullable for migration
aws_region = db.Column(db.Unicode(255), nullable=True, default="us-east-1")
"""The AWS region of the S3 bucket."""

# FIXME nullable for migration
bucket_public_read = db.Column(db.Boolean, nullable=True, default=False)
"""If True, objects in the S3 bucket will have the ``public-read`` ACL.
For objects using a proxy, this can be False.
"""

products = db.relationship(
"Product", back_populates="organization", lazy="dynamic"
)
Expand All @@ -358,17 +369,35 @@ class Organization(db.Model): # type: ignore
back_populates="organization",
)

def get_aws_region(self) -> str:
    """Return the AWS region of the S3 bucket.

    Adapter that falls back to ``"us-east-1"`` while the ``aws_region``
    column is still nullable to ease migration.
    """
    region = self.aws_region
    return "us-east-1" if region is None else region

def get_bucket_public_read(self) -> bool:
    """Return whether S3 objects receive the ``public-read`` ACL.

    Adapter that treats a ``None`` value as `False` while the
    ``bucket_public_read`` column is still nullable to ease migration.
    """
    flag = self.bucket_public_read
    return False if flag is None else flag

def set_fastly_api_key(self, api_key: Optional[SecretStr]) -> None:
    """Encrypt the given Fastly API key and store it on the model.

    Passing `None` is a no-op; the stored value is left untouched.
    """
    if api_key is not None:
        self.fastly_encrypted_api_key = self._encrypt_secret_str(api_key)

def get_fastly_api_key(self) -> SecretStr:
def get_fastly_api_key(self) -> Optional[SecretStr]:
"""Get the decrypted Fastly API key."""
encrypted_key = self.fastly_encrypted_api_key
if encrypted_key is None:
raise ValueError("fastly_encrypted_api_key is not set.")
return None
return self._decrypt_to_secret_str(encrypted_key)

def set_aws_secret_key(self, secret_key: Optional[SecretStr]) -> None:
Expand Down Expand Up @@ -679,11 +708,17 @@ def register_uploaded_build(self) -> None:

def get_tracking_editions(self) -> List[Edition]:
"""Get the editions that should rebuild to this build."""
logger = get_logger(__name__)
editions = (
Edition.query.autoflush(False)
.filter(Edition.product == self.product)
.all()
)
logger.debug(
"In get_tracking_editions found editions for product",
count=len(editions),
editions=str(editions),
)

return [
edition
Expand Down Expand Up @@ -842,14 +877,10 @@ def should_rebuild(self, build: Build) -> bool:
`True` if the edition should be rebuilt using this Build, or
`False` otherwise.
"""
# shim during refactoring
from keeper.api._urls import url_for_edition

logger = get_logger(__name__)
logger.debug("Inside Edition.should_rebuild")

logger.debug(
"Edition {!r} in should_rebuild".format(url_for_edition(self))
)
logger = get_logger(__name__)

candidate_build = build

Expand All @@ -862,11 +893,9 @@ def should_rebuild(self, build: Build) -> bool:
try:
tracking_mode = edition_tracking_modes[self.mode]
except (KeyError, ValidationError):

tracking_mode = edition_tracking_modes[self.default_mode_id]
logger.warning(
"Edition {!r} has an unknown tracking"
"mode".format(url_for_edition(self))
"Edition {!r} has an unknown tracking" "mode".format(self.slug)
)

return tracking_mode.should_update(self, candidate_build)
Expand Down
Loading

0 comments on commit 6c80010

Please sign in to comment.