This repository has been archived by the owner on Oct 18, 2023. It is now read-only.

Commit

Feature/issue 23 (#25)
* pylint, flake8 and github actions with terraform

* added package.json for snyk

* added db module from repo

* change project.toml for hydrocron-db

* disable test for now

* added build.sh

* ignore dynamodb_local to save space

* updated

* Remove the now ignored directory

* force re-run

* force re-run

* reducing size

* force re-run

* enable test

* removed package.json

* removed node stuff

* force re-run

* remove terraform for this PR

* snyk timedout

* updated changelog

* tidy up

* remove terraform from GA

* Fixed tests with poetry

* Upload jars

* Remaining jar files

* changes in gitignore

---------

Co-authored-by: vggonzal <9Tcostoamm>
vggonzal authored Oct 4, 2023
1 parent 054dae2 commit 7303265
Showing 14 changed files with 675 additions and 152 deletions.
162 changes: 162 additions & 0 deletions .github/workflows/build.yml
@@ -0,0 +1,162 @@
# This is the main build pipeline that verifies and publishes the software
name: Build
# Controls when the workflow will run
on:
  # Triggers the workflow on push events
  push:
    branches: [ develop, release/**, main, feature/**, issue/**, dependabot/** ]
    tags-ignore:
      - '*'
    paths-ignore:
      - 'pyproject.toml'

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

env:
  POETRY_VERSION: "1.3.1"
  PYTHON_VERSION: "3.10"
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  # First job in the workflow installs and verifies the software
  build:
    name: Build, Test, Verify, Publish
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    steps:
      - uses: getsentry/action-github-app-token@v2
        name: podaac cicd token
        id: podaac-cicd
        with:
          app_id: ${{ secrets.CICD_APP_ID }}
          private_key: ${{ secrets.CICD_APP_PRIVATE_KEY }}
      - uses: actions/checkout@v3
        with:
          repository: ${{ github.repository }}
          token: ${{ steps.podaac-cicd.outputs.token }}
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install bumpver & poetry
        run: pip3 install bumpver poetry poetry-plugin-bundle
      - name: Install dependencies
        run: poetry install

      - name: Get version
        id: get-version
        run: |
          echo "current_version=$(poetry version | awk '{print $2}')" >> $GITHUB_OUTPUT
          echo "pyproject_name=$(poetry version | awk '{print $1}')" >> $GITHUB_ENV
      - name: Bump pre-alpha version
        # If triggered by push to a feature branch
        if: |
          startsWith(github.ref, 'refs/heads/feature') ||
          startsWith(github.ref, 'refs/heads/issue') ||
          startsWith(github.ref, 'refs/heads/dependabot')
        run: |
          new_ver="${{ steps.get-version.outputs.current_version }}+$(git rev-parse --short ${GITHUB_SHA})"
          poetry version $new_ver
          echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV
      - name: Bump alpha version
        # If triggered by push to the develop branch
        if: ${{ github.ref == 'refs/heads/develop' }}
        run: |
          poetry version prerelease
          echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV
          echo "venue=sit" >> $GITHUB_ENV
      - name: Bump rc version
        # If triggered by push to a release branch
        if: ${{ startsWith(github.ref, 'refs/heads/release/') }}
        env:
          # True if the version already has a 'rc' pre-release identifier
          BUMP_RC: ${{ contains(steps.get-version.outputs.current_version, 'rc') }}
        run: |
          if [ "$BUMP_RC" = true ]; then
            poetry version prerelease
          else
            poetry version ${GITHUB_REF#refs/heads/release/}-rc.1
          fi
          echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV
          echo "venue=uat" >> $GITHUB_ENV
      - name: Release version
        # If triggered by push to the main branch
        if: ${{ startsWith(github.ref, 'refs/heads/main') }}
        env:
          CURRENT_VERSION: ${{ steps.get-version.outputs.current_version }}
        # Remove -rc.* from end of version string
        # The ${string%%substring} syntax below deletes the longest match of $substring from back of $string.
        run: |
          poetry version ${CURRENT_VERSION%%-rc.*}
          echo "software_version=$(poetry version | awk '{print $2}')" >> $GITHUB_ENV
          echo "venue=ops" >> $GITHUB_ENV
      - name: Install hydrocron
        run: poetry install
      - name: Lint
        run: |
          poetry run pylint hydrocronapi
          poetry run flake8 hydrocronapi
      ## Set environment variables
      - name: Configure Initial YAML file and environment variables
        run: |
          echo "THE_VERSION=${{ env.software_version }}" >> $GITHUB_ENV;
          echo "GIT_BRANCH=${GITHUB_REF#refs/heads/}" >> $GITHUB_ENV;
          GITHUB_REF_READABLE="${GITHUB_REF//\//-}"
          echo "GITHUB_REF_READABLE=${GITHUB_REF_READABLE}" >> $GITHUB_ENV
          echo "THE_ENV=sit" >> $GITHUB_ENV
          echo "TARGET_ENV_UPPERCASE=SIT" >> $GITHUB_ENV
      - name: Run Snyk as a blocking step
        uses: snyk/actions/python-3.9@master
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        with:
          command: test
          args: >
            --org=${{ secrets.SNYK_ORG_ID }}
            --project-name=${{ github.repository }}
            --severity-threshold=high
            --fail-on=all
      - name: Run Snyk on Python
        uses: snyk/actions/python-3.9@master
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
        with:
          command: monitor
          args: >
            --org=${{ secrets.SNYK_ORG_ID }}
            --project-name=${{ github.repository }}
      - name: Commit Version Bump
        # If building develop, a release branch, or main then we commit the version bump back to the repo
        if: |
          github.ref == 'refs/heads/develop' ||
          github.ref == 'refs/heads/main' ||
          startsWith(github.ref, 'refs/heads/release')
        run: |
          git config user.name "${GITHUB_ACTOR}"
          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
          git commit -am "/version ${{ env.software_version }}"
          git push
      - name: Push Tag
        if: |
          github.ref == 'refs/heads/develop' ||
          github.ref == 'refs/heads/main' ||
          startsWith(github.ref, 'refs/heads/release')
        run: |
          git config user.name "${GITHUB_ACTOR}"
          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
          git tag -a "${{ env.software_version }}" -m "Version ${{ env.software_version }}"
          git push origin "${{ env.software_version }}"
      - name: Build Python Artifact
        run: |
          poetry build
      - name: Test with pytest
        run: |
          poetry run pytest tests/test_api.py -k 'test_gettimeseries_get'
          poetry run pytest tests/test_api.py -k 'test_getsubset_get'
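
The version-bump steps above boil down to a few string manipulations around `poetry version`. Below is a rough, illustrative Python sketch of that branch-to-version mapping; the pipeline itself relies on `poetry version` plus shell parameter expansion, and the alpha/rc increments done by `poetry version prerelease` are only stubbed here:

```python
# Illustrative only; mirrors the string handling in the workflow steps above.
# Requires Python 3.9+ for str.removeprefix.
def next_version(ref: str, current_version: str, short_sha: str) -> str:
    """Return the version the pipeline would publish for a given git ref."""
    if ref.startswith(("refs/heads/feature", "refs/heads/issue", "refs/heads/dependabot")):
        # Pre-alpha build: append the short commit SHA as build metadata.
        return f"{current_version}+{short_sha}"
    if ref.startswith("refs/heads/release/"):
        if "rc" in current_version:
            # Already a release candidate; the real step runs `poetry version prerelease`.
            return current_version
        # First rc on the branch: ${GITHUB_REF#refs/heads/release/}-rc.1
        return ref.removeprefix("refs/heads/release/") + "-rc.1"
    if ref.startswith("refs/heads/main"):
        # Release: drop the rc suffix, mirroring ${CURRENT_VERSION%%-rc.*}
        return current_version.split("-rc.")[0]
    # The develop-branch bump (`poetry version prerelease`) is not modeled here.
    return current_version


assert next_version("refs/heads/feature/issue-23", "1.0.0", "7303265") == "1.0.0+7303265"
assert next_version("refs/heads/release/1.1.0", "1.0.0", "7303265") == "1.1.0-rc.1"
assert next_version("refs/heads/main", "1.1.0-rc.3", "7303265") == "1.1.0"
```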
6 changes: 4 additions & 2 deletions .gitignore
@@ -5,7 +5,7 @@ __pycache__/
 *.pyc
 
 # C extensions
-*.so
+#*.so
 
 # Distribution / packaging
 .Python
@@ -69,4 +69,6 @@ hydrocronapi/controllers/__pycache__/
 
 .idea
 .env
-docker/dynamodb
+docker/dynamodb
+
+node_modules/
7 changes: 3 additions & 4 deletions .pylintrc
@@ -274,7 +274,7 @@ exclude-too-few-public-methods=
 ignored-parents=
 
 # Maximum number of arguments for function / method.
-max-args=5
+max-args=6
 
 # Maximum number of attributes for a class (see R0902).
 max-attributes=7
@@ -435,8 +435,7 @@ timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.
 
 # List of note tags to take in consideration, separated by a comma.
 notes=FIXME,
-      XXX,
-      TODO
+      XXX
 
 # Regular expression of note tags to take in consideration.
 notes-rgx=
@@ -445,7 +444,7 @@
 [REFACTORING]
 
 # Maximum number of nested blocks for function / method body
-max-nested-blocks=5
+max-nested-blocks=6
 
 # Complete name of functions that never returns. When checking for
 # inconsistent-return-statements if a never returning function is called then
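
The relaxed limits above (`max-args` 5 to 6, `max-nested-blocks` 5 to 6) presumably accommodate the controller code in this commit; for example, `getsubset_get` in `hydrocronapi/controllers/subset.py` below takes six parameters, which pylint's `too-many-arguments` (R0913) check would flag under the old `max-args=5`:

```python
# Illustrative stub only, not project code: a six-parameter signature like
# the one in the subset controller passes R0913 once max-args=6.
def getsubset_get(feature, subsetpolygon, start_time, end_time, output, fields):
    """Six arguments: flagged with max-args=5, accepted with max-args=6."""
    return feature, subsetpolygon, start_time, end_time, output, fields
```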
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 ### Added
 - Issue 8 - Hydrocron API implementation with mysql local database
+- Issue 23 - Added github actions with Snyk, pylint, flake8
 ### Changed
 - Issue 8 - Hydrocron API implementation with dynamodb local database
 - Issue 8 - Rearrange database code
4 changes: 2 additions & 2 deletions README.md
@@ -29,7 +29,7 @@ docker compose up
 To run the server, please execute the following from the root directory:
 
 ```
-python3 -m hydrocronapi
+HYDROCRON_ENV=dev python -m hydrocronapi
 ```
 
 and open your browser to here:
@@ -44,7 +44,7 @@ Your Swagger definition lives here:
 http://localhost:8080/hydrocron/HydroAPI/1.0.0/swagger.json
 ```
 
-## Running with Docker 
+## Running with Docker
 
 To run the server on a Docker container, please execute the following from the root directory:
 
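With the dev server started as shown above (`HYDROCRON_ENV=dev python -m hydrocronapi`), a quick smoke test is to fetch the Swagger definition the README points at. The snippet below is not part of the repository; it assumes the server is listening on the default port 8080 and that the `requests` package is installed:

```python
# Local smoke test (illustrative, not project code).
import requests

resp = requests.get(
    "http://localhost:8080/hydrocron/HydroAPI/1.0.0/swagger.json", timeout=10
)
resp.raise_for_status()
print(resp.json().get("info", {}).get("title"))
```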
11 changes: 9 additions & 2 deletions hydrocronapi/__main__.py
@@ -1,7 +1,14 @@
-#!/usr/bin/env python3
+"""
+Hydrocron API module
+"""
+# !/usr/bin/env python3
+
 
 def main():
-    from hydrocronapi import hydrocron
+    """
+    Main function to run flask app in port 8080
+    """
+    from hydrocronapi import hydrocron  # noqa: E501 # pylint: disable=import-outside-toplevel
     hydrocron.flask_app.run(port=8080)
 
 
73 changes: 47 additions & 26 deletions hydrocronapi/controllers/subset.py
@@ -1,11 +1,15 @@
"""
Hydrocron API subset controller
"""
# pylint: disable=C0103
import json
import logging
import time
from datetime import datetime
from typing import Generator
from shapely import Polygon, Point
from hydrocronapi import hydrocron

from shapely import Polygon, Point

logger = logging.getLogger()

@@ -52,15 +56,15 @@ def getsubset_get(feature, subsetpolygon, start_time, end_time, output, fields):
return data


def format_subset_json(results: Generator, polygon, exact, time):
def format_subset_json(results: Generator, polygon, exact, dataTime): # noqa: E501 # pylint: disable=W0613
"""
Parameters
----------
cur
swot_id
results
polygon
exact
time
dataTime
Returns
-------
@@ -79,34 +83,18 @@
else:

data['status'] = "200 OK"
data['time'] = str(time) + " ms."
data['time'] = str(dataTime) + " ms."
# data['search on'] = {"feature_id": feature_id}
data['type'] = "FeatureCollection"
data['features'] = []
i = 0
total = len(results)
for t in results:
flag_polygon = False
if ((t['time'] != '-999999999999')): # and (t['width'] != '-999999999999')):
if t['time'] != '-999999999999': # and (t['width'] != '-999999999999')):
feature = {}
feature['properties'] = {}
feature['geometry'] = {}
feature['type'] = "Feature"
feature['geometry']['coordinates'] = []
'''
geometry = t['geometry'].replace('"LINESTRING (','').replace(')"','')
for p in geometry.split("; "):
(x, y) = p.split(" ")
point = Point(x, y)
if (polygon.contains(point)):
feature['geometry']['coordinates'].append([float(x),float(y)])
feature['properties']['time'] = datetime.fromtimestamp(float(t['time'])+946710000).strftime("%Y-%m-%d %H:%M:%S")
feature['properties']['reach_id'] = float(t['reach_id'])
feature['properties']['wse'] = float(t['wse'])
feature['properties']['slope'] = float(t['slope'])
flag_polygon = True
if (flag_polygon):
'''
point = Point(float(t['p_lon']), float(t['p_lat']))
if polygon.contains(point):
feature_type = ''
@@ -146,15 +134,15 @@ def format_subset_json(results: Generator, polygon, exact, time):
return data


def format_subset_csv(results: Generator, polygon, exact, time, fields):
def format_subset_csv(results: Generator, polygon, exact, dataTime, fields): # noqa: E501 # pylint: disable=W0613
"""
Parameters
----------
results
swot_id
exact
time
dataTime
Returns
-------
@@ -174,7 +162,6 @@ def format_subset_csv(results: Generator, polygon, exact, time, fields):
csv = fields + '\n'
fields_set = fields.split(", ")
for t in results:
flag_polygon = False
if t['time'] != '-999999999999': # and (t['width'] != '-999999999999')):
point = Point(float(t['p_lon']), float(t['p_lat']))
if polygon.contains(point):
@@ -193,3 +180,37 @@ def format_subset_csv(results: Generator, polygon, exact, time, fields):
csv += '\n'

return csv


def lambda_handler(event, context): # noqa: E501 # pylint: disable=W0613
"""
This function queries the database for relevant results
"""

feature = event['body']['feature']
subsetpolygon = event['body']['subsetpolygon']
start_time = event['body']['start_time']
end_time = event['body']['end_time']
output = event['body']['output']
fields = event['body']['fields']

results = getsubset_get(feature, subsetpolygon, start_time, end_time, output, fields)

data = {}

status = "200 OK"

data['status'] = status
data['time'] = str(10) + " ms."
data['hits'] = 10

data['search on'] = {
"parameter": "identifier",
"exact": "exact",
"page_number": 0,
"page_size": 20
}

data['results'] = results

return data
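
Both `format_subset_json` and `format_subset_csv` above keep a result only when its time value is not the `-999999999999` fill value and its `(p_lon, p_lat)` point falls inside the request polygon. A self-contained sketch of that filter, using invented coordinates (only the shapely calls mirror the controller code):

```python
# Stand-alone illustration of the point-in-polygon filtering used by the
# subset formatters above; polygon and records are made up for this example.
from shapely import Point, Polygon

polygon = Polygon([(-95.6, 29.4), (-95.0, 29.4), (-95.0, 30.0), (-95.6, 30.0)])

records = [
    {"p_lon": "-95.3", "p_lat": "29.7", "time": "711892345.0"},    # inside
    {"p_lon": "-94.1", "p_lat": "29.7", "time": "711892345.0"},    # outside
    {"p_lon": "-95.3", "p_lat": "29.7", "time": "-999999999999"},  # fill value, skipped
]

kept = [
    record for record in records
    if record["time"] != "-999999999999"
    and polygon.contains(Point(float(record["p_lon"]), float(record["p_lat"])))
]
print(len(kept))  # -> 1
```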