diff --git a/.github/workflows/auto-format.yaml b/.github/workflows/auto-format.yaml
deleted file mode 100644
index a8225520..00000000
--- a/.github/workflows/auto-format.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-name: auto-format
-on: pull_request
-jobs:
-  format:
-    # Check if the PR is not from a fork
-    if: github.event.pull_request.head.repo.full_name == github.repository
-    runs-on: ubuntu-latest
-    env:
-      PYTHON_VERSION: "3.10"
-      POETRY_VERSION: "1.4.0"
-    steps:
-      - name: get code
-        uses: actions/checkout@v1
-        with:
-          ref: ${{ github.head_ref }}
-      - name: install python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-      - name: install poetry
-        uses: snok/install-poetry@v1
-        with:
-          version: ${{ env.POETRY_VERSION }}
-      - name: install linter dependencies
-        run: |
-          make install-lint
-      - name: run formatter
-        run: |
-          make format
-      - name: check for modified files
-        id: git-check
-        run: |
-          git status
-          echo ::set-output name=modified::$(if git diff-index --quiet HEAD --; then echo "false"; else echo "true"; fi)
-      - name: push changes (if needed)
-        if: steps.git-check.outputs.modified == 'true'
-        run: |
-          git config --global user.name 'Auto-format Bot'
-          git config --global user.email 'autoformatbot@groundlight.ai'
-          git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}
-          git commit -am "Automatically reformatting code"
-          git push
diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml
new file mode 100644
index 00000000..7c92afaa
--- /dev/null
+++ b/.github/workflows/cicd.yaml
@@ -0,0 +1,250 @@
+# CI/CD workflow for the groundlight-sdk-python repository. We lint, test, deploy docs, and publish
+# to pypi.
+name: cicd
+on: [push]
+
+env:
+  PYTHON_VERSION: "3.10"
+  POETRY_VERSION: "1.4.0"
+jobs:
+  # Run our linter on every push to the repository.
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: get code
+        uses: actions/checkout@v3
+      - name: install python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: install poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: ${{ env.POETRY_VERSION }}
+      - name: show python version ${{ env.PYTHON_VERSION }}
+        run: |
+          poetry run python --version
+      - name: install linter dependencies
+        run: |
+          make install-lint
+      - name: lint
+        run: |
+          make lint
+
+  # Run integration tests against the API. For efficiency, we only run one version of python on
+  # non-main branches.
+  test-simple:
+    runs-on: ubuntu-latest
+    steps:
+      - name: get code
+        uses: actions/checkout@v3
+      - name: install python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: install poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: ${{ env.POETRY_VERSION }}
+      - name: show python version ${{ env.PYTHON_VERSION }}
+        run: |
+          poetry run python --version
+      - name: install dependencies (without extras)
+        run: make install
+      # TODO: Should we run all tests against the prod API?
+      - name: run tests
+        env:
+          # This is associated with the "sdk-integ-test" user, credentials on 1password
+          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN }}
+        run: make test-integ
+      - name: run docs tests
+        run: make test-docs
+        env:
+          # This is associated with the "sdk-test-prod" user, credentials on 1password
+          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN_PROD }}
+
+  # Run integration tests against the API (only on the main branch, though). The comprehensive
+  # version runs a matrix of python versions for better coverage.
+  test-comprehensive:
+    if: github.ref == 'refs/heads/main'
+    needs:
+      - test-simple
+    runs-on: ubuntu-latest
+    strategy:
+      # It's totally debatable which is better here: fail-fast or not.
+      # Failing fast will use fewer cloud resources, in theory.
+      # But if the tests are slightly flaky (fail to pip install something)
+      # Then one flaky install kills lots of jobs that need to be redone.
+      # So the efficiency argument has its limits
+      # Failing slow is clearer about what's going on.
+      # This is pretty unambiguous, so we're going with it for now.
+      fail-fast: false
+      matrix:
+        python-version: [
+          #"3.6", # Default on Ubuntu18.04 but openapi-generator fails
+          "3.7",
+          "3.8",
+          "3.9",
+          "3.10",
+          "3.11",
+        ]
+        install_extras: [true, false]
+    steps:
+      - name: get code
+        uses: actions/checkout@v3
+      - name: install python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: install poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: ${{ env.POETRY_VERSION }}
+      - name: show python version ${{ matrix.python-version }}
+        run: |
+          poetry run python --version
+      - name: install dependencies
+        run: make install
+      - name: install extras
+        if: matrix.install_extras
+        run: make install-extras
+      # TODO: Should we run all tests against the prod API?
+      - name: run tests
+        env:
+          # This is associated with the "sdk-integ-test" user, credentials on 1password
+          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN }}
+        run: make test-integ
+      - name: run docs tests
+        run: make test-docs
+        env:
+          # This is associated with the "sdk-test-prod" user, credentials on 1password
+          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN_PROD }}
+
+  # Run the auto-formatter when we're not on the main branch. This will push a new commit to the PR
+  # branch if needed.
+  format:
+    if: github.ref != 'refs/heads/main'
+    runs-on: ubuntu-latest
+    steps:
+      - name: get code
+        uses: actions/checkout@v1
+        with:
+          ref: ${{ github.head_ref }}
+      - name: install python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: install poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: ${{ env.POETRY_VERSION }}
+      - name: show python version ${{ env.PYTHON_VERSION }}
+        run: |
+          poetry run python --version
+      - name: install linter dependencies
+        run: |
+          make install-lint
+      - name: run formatter
+        run: |
+          make format
+      - name: check for modified files
+        id: git-check
+        run: |
+          git status
+          echo ::set-output name=modified::$(if git diff-index --quiet HEAD --; then echo "false"; else echo "true"; fi)
+      - name: push changes (if needed)
+        if: steps.git-check.outputs.modified == 'true'
+        run: |
+          git config --global user.name 'Auto-format Bot'
+          git config --global user.email 'autoformatbot@groundlight.ai'
+          git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}
+          git commit -am "Automatically reformatting code"
+          git push
+
+  # Check if we have changes in the docs directory, and if so, set a `changed` flag.
+  check-docs-changes:
+    runs-on: ubuntu-latest
+    outputs:
+      changed: ${{ steps.changes.outputs.changed }}
+    steps:
+      - name: get code
+        uses: actions/checkout@v3
+      - name: check for docs changes
+        id: changes
+        run: |
+          if git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | grep '^docs/'; then
+            echo "Changes detected in 'docs'"
+            echo "::set-output name=changed::true"
+          else
+            echo "No changes detected in 'docs'"
+            echo "::set-output name=changed::false"
+          fi
+
+  # Try to build the docs if there are changes in the docs directory, and deploy to github pages if
+  # we're on the main branch and tests passed. Note that we don't require a code release -- we don't
+  # want to couple documentation updates with code releases.
+  deploy-docs:
+    if: needs.check-docs-changes.outputs.changed == 'true' && github.ref == 'refs/heads/main'
+    needs:
+      - check-docs-changes
+      - test-comprehensive
+    runs-on: ubuntu-latest
+    steps:
+      - name: Get code
+        uses: actions/checkout@v3
+      - name: Setup npm
+        uses: actions/setup-node@v3
+        with:
+          node-version: 18
+          cache: npm
+      - name: Install dependencies
+        run: npm install
+      - name: Build website
+        run: npm run build
+      - name: Deploy website (if on main branch)
+        # Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus
+        uses: peaceiris/actions-gh-pages@v3
+        # Only deploy on pushes to the `main` branch
+        if: github.ref == 'refs/heads/main'
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          publish_dir: ./docs/build
+          #TODO: setup a staging directory when doing a PR
+          #destination_dir: staging
+          # The following lines assign commit authorship to the official
+          # GH-Actions bot for deploys to `gh-pages` branch:
+          # https://github.com/actions/checkout/issues/13#issuecomment-724415212
+          # The GH actions bot is used by default if you didn't specify the two fields.
+          # You can swap them out with your own user credentials.
+          user_name: github-actions[bot]
+          user_email: 41898282+github-actions[bot]@users.noreply.github.com
+
+  # When a release is created on github (and comprehensive tests passed), publish the groundlight
+  # package to public pypi.
+  publish-python-package:
+    if: github.ref == 'refs/heads/main' && github.event_name == 'release'
+    runs-on: ubuntu-latest
+    needs:
+      - test-comprehensive
+      # For now, we'll require the comprehensive tests to succeed, but not the linter checks.
+      # - lint
+    env:
+      POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_PUBLISH_TOKEN }}
+    steps:
+      - name: get code
+        uses: actions/checkout@v3
+      - name: install python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: install poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: ${{ env.POETRY_VERSION }}
+      - name: show python version ${{ env.PYTHON_VERSION }}
+        run: |
+          poetry run python --version
+      - name: build package
+        run: poetry build
+      - name: configure poetry and publish
+        run: poetry publish
diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
deleted file mode 100644
index aaec6a59..00000000
--- a/.github/workflows/lint.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-# Run our linter on every push to the repository.
-name: lint
-on: [push]
-
-jobs:
-  lint:
-    runs-on: ubuntu-latest
-    env:
-      PYTHON_VERSION: "3.10"
-      POETRY_VERSION: "1.4.0"
-    steps:
-      - name: install python
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ env.PYTHON_VERSION }}
-      - name: install poetry
-        uses: snok/install-poetry@v1
-        with:
-          version: ${{ env.POETRY_VERSION }}
-      - name: get code
-        uses: actions/checkout@v3
-      - name: install linter dependencies
-        run: |
-          make install-lint
-      - name: lint
-        run: |
-          make lint
diff --git a/.github/workflows/publish-docs.yaml b/.github/workflows/publish-docs.yaml
deleted file mode 100644
index 26a77e22..00000000
--- a/.github/workflows/publish-docs.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: publish-docs
-on:
-  push:
-    paths:
-      # Only run this workflow if there are changes in any of these files.
-      - .github/workflows/**
-      - docs/**
-    branches:
-      - main
-defaults:
-  run:
-    working-directory: docs
-
-jobs:
-  deploy-docs:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Get code
-        uses: actions/checkout@v3
-      - name: Setup npm
-        uses: actions/setup-node@v3
-        with:
-          node-version: 18
-          cache: npm
-      - name: Install dependencies
-        run: npm install
-      - name: Build website
-        run: npm run build
-      - name: Deploy website
-        # Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus
-        uses: peaceiris/actions-gh-pages@v3
-        # Only deploy on pushes to the `main` branch
-        if: github.ref == 'refs/heads/main'
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: ./docs/build
-          #TODO: setup a staging directory when doing a PR
-          #destination_dir: staging
-          # The following lines assign commit authorship to the official
-          # GH-Actions bot for deploys to `gh-pages` branch:
-          # https://github.com/actions/checkout/issues/13#issuecomment-724415212
-          # The GH actions bot is used by default if you didn't specify the two fields.
-          # You can swap them out with your own user credentials.
-          user_name: github-actions[bot]
-          user_email: 41898282+github-actions[bot]@users.noreply.github.com
diff --git a/.github/workflows/publish-pypi.yaml b/.github/workflows/publish-pypi.yaml
deleted file mode 100644
index 5da264f8..00000000
--- a/.github/workflows/publish-pypi.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-# When a release is created on github, publish the groundlight package to public pypi
-# (Similar to https://github.com/positronix-ai/predictors/blob/main/.github/workflows/publish.yaml)
-name: publish package
-on:
-  release:
-    types: [created]
-jobs:
-  publish-python-package:
-    runs-on: ubuntu-latest
-    env:
-      POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_PUBLISH_TOKEN }}
-    steps:
-      - name: install python
-        uses: actions/setup-python@v3
-        with:
-          python-version: "3.10"
-      - name: install poetry
-        run: |
-          pip install -U pip
-          pip install poetry
-      - name: get code
-        uses: actions/checkout@v2
-      - name: build package
-        run: poetry build
-      - name: configure poetry and publish
-        run: poetry publish
diff --git a/.github/workflows/test-integ.yaml b/.github/workflows/test-integ.yaml
deleted file mode 100644
index bbad87c6..00000000
--- a/.github/workflows/test-integ.yaml
+++ /dev/null
@@ -1,64 +0,0 @@
-# Run integration tests against the integ API endpoint
-name: test integ
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-jobs:
-  run-tests:
-    runs-on: ubuntu-20.04
-    strategy:
-      # It's totally debatable which is better here: fail-fast or not.
-      # Failing fast will use fewer cloud resources, in theory.
-      # But if the tests are slightly flaky (fail to pip install something)
-      # Then one flaky install kills lots of jobs that need to be redone.
-      # So the efficiency argument has its limits
-      # Failing slow is clearer about what's going on.
-      # This is pretty unambiguous, so we're going with it for now.
-      fail-fast: false
-      matrix:
-        python-version: [
-          #"3.6", # Default on Ubuntu18.04 but openapi-generator fails
-          "3.7",
-          "3.8",
-          "3.9",
-          "3.10",
-          "3.11",
-        ]
-        install_numpy: [true, false]
-        install_pillow: [true, false]
-    steps:
-      - name: get code
-        uses: actions/checkout@v3
-      - name: install python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-      - name: Display Python version
-        run: python -c "import sys; print(sys.version)"
-      - name: install poetry and build poetry environment
-        run: |
-          pip install -U pip
-          pip install poetry
-          poetry install
-      - name: setup environment
-        run: make install
-      - name: install numpy
-        if: matrix.install_numpy
-        run: |
-          poetry run pip install numpy
-      - name: install pillow
-        if: matrix.install_pillow
-        run: |
-          poetry run pip install pillow
-      - name: run tests
-        env:
-          # This is associated with the "sdk-integ-test" user, credentials on 1password
-          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN }}
-        run: make test-integ
-      - name: run docs tests
-        run: make test-docs
-        env:
-          # This is associated with the "sdk-test-prod" user, credentials on 1password
-          GROUNDLIGHT_API_TOKEN: ${{ secrets.GROUNDLIGHT_API_TOKEN_PROD }}
diff --git a/Makefile b/Makefile
index 0d758d7f..4df8d4b4 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,9 @@
 install: ## Install the package from source
 	poetry install
 
+install-extras: install ## Install the package from source with extra dependencies
+	poetry run pip install numpy
+
 install-lint: ## Only install the linter dependencies
 	poetry install --only lint
 
diff --git a/docs/docs/installation/libraries-numpy-pil.md b/docs/docs/installation/libraries-numpy-pil.md
index 2b6df576..8ed8ef5f 100644
--- a/docs/docs/installation/libraries-numpy-pil.md
+++ b/docs/docs/installation/libraries-numpy-pil.md
@@ -3,7 +3,7 @@
 ## Smaller is better!
 
 Groundlight is optimized to run on small edge devices. As such, you can use the Groundlight SDK without
-installing large libraries like `numpy` or `PIL` or `OpenCV`.
+installing large libraries like `numpy` or `OpenCV`.
 
 But if you're already installing them, we'll use them. Our SDK detects if these libraries are installed
 and will make use of them if they're present. If not, we'll gracefully degrade, and tell you what's
@@ -16,7 +16,7 @@
 Pixel values should be from 0-255 (not 0.0-1.0 as floats). SO `uint8` data type is preferred.
 
 Here's sample code to create an 800x600 random image in numpy:
-```
+```python notest
 import numpy as np
 
 img = np_img = np.random.uniform(0, 255, (600, 800, 3))
@@ -31,7 +31,7 @@ The Groundlight SDK can accept PIL images directly in `submit_image_query`.
 
 OpenCV creates images that are stored as numpy arrays. So can send them to `submit_image_query` directly.
 BUT! OpenCV uses BGR color order, not RGB. You can reverse them as follows:
 
-```
+```python notest
 rgb_img = bgr_img[:, :, ::-1]
 ```