38 changes: 38 additions & 0 deletions .github/workflows/dev_ecr_push.yml
@@ -0,0 +1,38 @@
name: dev ECR push
on:
  push:
    branches:
      - main
# Set defaults
defaults:
  run:
    shell: bash

env:
  AWS_REGION: "us-east-1"
  AWS_ACCOUNT_ID: "222053980223"
  IAM_ROLE: "ppod-gha-dev"

jobs:
  deploy:
    name: Deploy dev build
    runs-on: ubuntu-latest
    # These permissions are needed to interact with GitHub's OIDC Token endpoint.
    permissions:
      id-token: write
      contents: read

    steps:
      - uses: actions/checkout@v2
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::${{ env.AWS_ACCOUNT_ID }}:role/${{ env.IAM_ROLE }}
          aws-region: ${{ env.AWS_REGION }}

      - name: Build image
        run: make dist-dev
      - name: Push image
        run: make publish-dev
      - name: Update lambda function
        run: make update-lambda-dev
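
The deploy job authenticates to AWS via GitHub's OIDC token rather than long-lived keys, then builds, pushes, and points the Lambda at the new image. A quick way to spot-check the result after a run is to ask Lambda which image it is serving. A minimal boto3 sketch (not part of this PR; it assumes credentials for the dev account and the region set above):

```python
import boto3

# Spot-check sketch: confirm ppod-dev picked up the freshly pushed image.
# Assumes the same dev account and us-east-1 region as the workflow above.
lambda_client = boto3.client("lambda", region_name="us-east-1")

function = lambda_client.get_function(FunctionName="ppod-dev")
print(function["Code"]["ImageUri"])  # the ECR image the function now runs
print(function["Configuration"]["LastUpdateStatus"])  # "Successful" once the update lands
```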
4 changes: 2 additions & 2 deletions Makefile
@@ -17,7 +17,7 @@ update: install ## Update all Python dependencies

### Test commands ###
test: ## Run tests and print a coverage report
-	pipenv run coverage run --source=. -m pytest
+	pipenv run coverage run --include=ppod.py -m pytest
	pipenv run coverage report -m

coveralls: test
@@ -53,7 +53,7 @@ publish-dev: dist-dev ## Build, tag and push
	docker push $(ECR_REGISTRY_DEV)/ppod-dev:latest
	docker push $(ECR_REGISTRY_DEV)/ppod-dev:`git describe --always`

-update-format-lambda-dev: ## Updates the lambda with whatever is the most recent image in the ecr
+update-lambda-dev: ## Updates the lambda with whatever is the most recent image in the ecr
	aws lambda update-function-code \
		--function-name ppod-dev \
		--image-uri $(shell aws sts get-caller-identity --query Account --output text).dkr.ecr.us-east-1.amazonaws.com/ppod-dev:latest
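
For reference, `update-lambda-dev` shells out to the AWS CLI and resolves the account ID via STS. The same operation in boto3 would look roughly like this (a sketch mirroring the Make target, not code from this repo):

```python
import boto3

# Rough boto3 equivalent of `make update-lambda-dev`; a sketch, not part of
# this PR. The account ID is resolved via STS, as in the Makefile.
account_id = boto3.client("sts").get_caller_identity()["Account"]
image_uri = f"{account_id}.dkr.ecr.us-east-1.amazonaws.com/ppod-dev:latest"

boto3.client("lambda", region_name="us-east-1").update_function_code(
    FunctionName="ppod-dev",
    ImageUri=image_uri,
)
```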
3 changes: 3 additions & 0 deletions Pipfile
@@ -4,14 +4,17 @@ verify_ssl = true
name = "pypi"

[packages]
boto3 = "*"
sentry-sdk = "*"
smart-open = "*"

[dev-packages]
bandit = "*"
black = "*"
coverage = "*"
flake8 = "*"
isort = "*"
moto = "*"
mypy = "*"
pytest = "*"

330 changes: 328 additions & 2 deletions Pipfile.lock

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions README.md
@@ -17,11 +17,14 @@ make lint
```

## Required ENV
`BUCKET` = The bucket containing the compressed MARCXML files to be submitted to POD.

`SENTRY_DSN` = If set to a valid Sentry DSN, enables Sentry exception monitoring. This is not needed for local development.

`WORKSPACE` = Set to `dev` for local development; Terraform sets this to `stage` and `prod` in those environments.
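
A handler might consume these variables along the following lines. This is a hedged sketch: `ppod.py`'s actual startup code is not shown in this diff, and only the variable names come from this README.

```python
import os

import sentry_sdk

# Sketch based solely on the variable names documented above; ppod.py's
# real startup code is not part of this diff.
env = os.environ["WORKSPACE"]  # "dev" locally; "stage"/"prod" set by Terraform
bucket = os.environ["BUCKET"]  # holds the compressed MARCXML files for POD

if os.getenv("SENTRY_DSN"):  # optional; skipped in local development
    sentry_sdk.init(dsn=os.environ["SENTRY_DSN"], environment=env)
```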

### To run locally
NOTE: These instructions for running locally don't currently work, so functionality has to be verified in our dev AWS account.
- Build the container:
```bash
docker build -t ppod .
45 changes: 45 additions & 0 deletions conftest.py
@@ -0,0 +1,45 @@
import os

import boto3
import pytest
from moto import mock_s3


@pytest.fixture(scope="session")
def aws_credentials():
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"


@pytest.fixture()
def request_data_matching_file():
    request_data = {"filename-prefix": "upload/"}
    yield request_data


@pytest.fixture(scope="session")
def mocked_s3(aws_credentials):
    with mock_s3():
        s3 = boto3.client("s3", region_name="us-east-1")
        s3.create_bucket(Bucket="ppod")
        s3.put_object(
            Body=open("fixtures/pod.tar.gz", "rb"),
            Bucket="ppod",
            Key="upload/pod.tar.gz",
        )
        s3.create_bucket(Bucket="no_files")
        s3.create_bucket(Bucket="a_lot_of_files")
        for i in range(1001):
            s3.put_object(
                Body=str(i),
                Bucket="a_lot_of_files",
                Key=f"upload/{i}.txt",
            )
        yield s3


@pytest.fixture(autouse=True)
def test_env():
    os.environ = {"WORKSPACE": "test", "BUCKET": "ppod"}
    yield
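
These fixtures give tests a fully mocked S3 layout: one bucket holding the fixture tarball, one empty, and one with 1,001 keys, one more than `list_objects_v2` returns per page, presumably so pagination handling can be exercised. A sketch of tests leaning on them (illustrative only, not tests from this PR):

```python
# Illustrative tests against the conftest.py fixtures above; not part of
# this PR.
def test_upload_prefix_contains_fixture_file(mocked_s3):
    response = mocked_s3.list_objects_v2(Bucket="ppod", Prefix="upload/")
    assert response["KeyCount"] == 1
    assert response["Contents"][0]["Key"] == "upload/pod.tar.gz"


def test_large_bucket_spills_past_one_page(mocked_s3):
    # list_objects_v2 caps at 1,000 keys per call, so the 1,001-object
    # bucket forces a truncated first page.
    response = mocked_s3.list_objects_v2(Bucket="a_lot_of_files")
    assert response["IsTruncated"] is True
```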
Binary file added fixtures/pod.tar.gz
Binary file not shown.