customize from eoapi #1

Merged 4 commits on May 20, 2024

35 changes: 0 additions & 35 deletions .env.example

This file was deleted.

66 changes: 66 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,66 @@
name: CI

# Run on every pull request, and on push to main, tags, or the listed paths
on:
  push:
    branches:
      - main
    tags:
      - '*'
    paths:
      - 'runtimes/**'
      - 'docker-compose.*'
      - '.github/workflows/ci.yml'
      - '.pre-commit-config.yaml'
  pull_request:

jobs:
  tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Run pre-commit
        run: |
          python -m pip install --upgrade pip
          python -m pip install pre-commit
          pre-commit run --all-files

      - name: Launch services
        run: docker compose up -d

      - name: Install PostgreSQL/PostGIS libraries
        uses: nyurik/action-setup-postgis@v1

      - name: Install Python dependencies
        run: |
          python -m pip install pytest httpx pypgstac==0.8.5 psycopg[pool]

      - name: Ingest STAC Collections and Items
        run: |
          pypgstac pgready --dsn postgresql://username:[email protected]:5439/postgis
          pypgstac load collections .github/workflows/data/noaa-emergency-response.json --dsn postgresql://username:[email protected]:5439/postgis --method insert_ignore
          pypgstac load items .github/workflows/data/noaa-eri-nashville2020.json --dsn postgresql://username:[email protected]:5439/postgis --method insert_ignore
          psql postgresql://username:[email protected]:5439/postgis -f .github/workflows/data/my_data.sql

      # see https://github.com/developmentseed/tipg/issues/37
      - name: Restart the Vector service
        run: |
          docker compose stop vector
          docker compose up -d vector

      - name: Sleep for 10 seconds
        run: sleep 10s
        shell: bash

      - name: Integration tests
        run: python -m pytest .github/workflows/tests/

      - name: Stop services
        run: docker compose stop
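
The final step runs pytest against .github/workflows/tests/, whose contents are not part of this diff. A minimal sketch of what one of those integration tests could look like, assuming the docker-compose stack exposes the STAC API on port 8081 and the Vector (tipg) API on port 8083 — the ports and the test file itself are assumptions, not shown in this PR:

# test_services.py -- hypothetical integration test; endpoints and ports are assumptions
import httpx

stac_endpoint = "http://0.0.0.0:8081"    # assumed STAC API port
vector_endpoint = "http://0.0.0.0:8083"  # assumed tipg / Vector API port


def test_stac_collection_exists():
    # The workflow ingests the noaa-emergency-response collection with pypgstac,
    # so it should be discoverable through the STAC API.
    resp = httpx.get(f"{stac_endpoint}/collections/noaa-emergency-response")
    assert resp.status_code == 200
    assert resp.json()["id"] == "noaa-emergency-response"


def test_vector_table_registered():
    # my_data.sql creates public.my_data, which tipg should expose
    # after the Vector service restart earlier in the workflow.
    resp = httpx.get(f"{vector_endpoint}/collections/public.my_data")
    assert resp.status_code == 200
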
16 changes: 16 additions & 0 deletions .github/workflows/data/my_data.sql
@@ -0,0 +1,16 @@
SET standard_conforming_strings = OFF;
DROP TABLE IF EXISTS "public"."my_data" CASCADE;
DELETE FROM geometry_columns WHERE f_table_name = 'my_data' AND f_table_schema = 'public';
BEGIN;
CREATE TABLE "public"."my_data" ( "ogc_fid" SERIAL, CONSTRAINT "my_data_pk" PRIMARY KEY ("ogc_fid") );
SELECT AddGeometryColumn('public','my_data','geom',4326,'GEOMETRY',2);
CREATE INDEX "my_data_geom_geom_idx" ON "public"."my_data" USING GIST ("geom");
ALTER TABLE "public"."my_data" ADD COLUMN "id" VARCHAR;
ALTER TABLE "public"."my_data" ADD COLUMN "datetime" TIMESTAMP;
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E6100000010000001B0000003670CC05599B25C03A92CB7F483F54408907944DB9F221C0D9CEF753E315544069D68681BE5B22C0355D864BD1145440984C2580F45C27C062327530C20754409CB396CA942C30C08E6EC42E50F05340F32225E11DCB30C07C98C2D614ED5340075F984C15FC30C0075F984C15EC53400AA1BD9D6AD732C03439A530F50B5440D8BFC6C0170533C00414E74C050F54407650100F7C0E33C0B199D586A60F5440A01BF45DE29634C0B61719B9F6295440838D3D254B5D35C0DC611EC044375440B8A6A26802F135C06705618A2C4154407CBD21E2CF3136C09B1B77FC844554402CD49AE61D3736C076711B0DE045544039117CFD650136C001AEC11005475440DC27DD0AB9C935C0F45E61C1344854406182187FE9BA35C03AF2E08A854854400736A0D273F130C050CF32FAA1625440ED137AA9497230C0441F419D576554401D9FC06CB06E2BC0B1930B183C745440017C2AECC5F92AC01E2006F67A7554401895D40968822AC0986E1283C07654405D44620EE0782AC0E00B92AC54765440FAACE2F3F95C27C0CDCE93B2275354400D2FBCF61DD226C0385BB99C044D54403670CC05599B25C03A92CB7F483F5440', '0', '2004-10-19 10:23:54');
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E61000000100000019000000984067B8143E3DC043C2B8B8F40B5440ACEF9DFAC14B3DC0E950B3BEBB0C544070CE88D2DE503DC01B2FDD24060D544034C8A112A4243DC064CC7707650E54409232AD9551103DC079704A40060F5440A630DBCBFBF43CC0E22ABE1BDF0F5440AC95A5A7DFA638C09E34007606325440FE987A2A9D7238C05D165F0DA5335440D1BF9E64C80A38C0FF6D3AC6DC3654409ACC3E07335D36C0578150C82C4454407CBD21E2CF3136C09B1B77FC8445544039117CFD650136C001AEC110054754401EA7E8482ECF35C07F6ABC7493485440DC27DD0AB9C935C0F45E61C134485440A2F3387764C135C09C775737A44754405526CE34BBDB34C047F7C465133854408DF37646C5EA33C0F10FDC85BE2754406D6485236BA431C08C72AF36460054403EE8D9ACFA9C30C07CF2B0506BEE5340F32225E11DCB30C07C98C2D614ED5340FE41CA2BA27737C016B27D9C8ABB5340C442AD69DEA137C05F07CE1951BA5340F9CBEEC9C30A38C07E078C8947C05340898D7238194D38C059C5B4D10CC45340984067B8143E3DC043C2B8B8F40B5440', '1', '2004-10-20 10:23:54');
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E61000000100000013000000C0155236C40A38C052F1FFE1D8C75340B244B5A16EC837C014EBB5CD0CC4534073D712F2414F37C0D3BCE3141DBD5340FE41CA2BA27737C016B27D9C8ABB5340A2728C64C30A38C03BFB4402D0B553400C6AB4D7723A3DC0BDA377861D82534058CA32C4B15E3DC062105839B48053402A2097D1F19641C0EAE96F4E58CC5340F0A7C64B379941C07F6ABC7493CC5340E11AE2531A8741C01F2670501DCE5340CED31A45F57241C03EC92059D3CF534009E08D47F1E83FC0EAC3384350F05340DFE755925F713EC036A2858243005440ACEF9DFAC14B3DC0E950B3BEBB0C544034C8A112A4243DC064CC7707650E5440F602E719D4063DC0AE877727A90F54400A68226C78FA3CC0234A7B832F105440A630DBCBFBF43CC0E22ABE1BDF0F5440C0155236C40A38C052F1FFE1D8C75340', '2', '2004-10-21 10:23:54');
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E610000001000000110000001B2CBE53855542C051F99E0E805D534049A5CD2EAE0644C03857A7D846865340462575029A0844C0A60A46257586534063B4EEABC4F943C08D992E511D8853409C72BC6BC5E843C0920AAB5C038A5340721D3749863342C03D0220C7DABA53402A2097D1F19641C0EAE96F4E58CC5340E11AE2531A8741C01F2670501DCE534068226C787A7541C0075F984C15D05340CED31A45F57241C03EC92059D3CF534048E17A14AE173DC06B2BF697DD8353400C6AB4D7723A3DC0BDA377861D825340A03E0335AD283FC0314A54553C6953409C6F1F2DEA1541C00EA6095E6A425340BEC11726532541C0BE9F1A2FDD405340EB51B81E853342C0302C67AA4C5A53401B2CBE53855542C051F99E0E805D5340', '3', '2004-10-22 10:23:54');
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E610000001000000110000000A4C8422590E46C0B656FB86F03B5340D5E76A2BF60F46C0075F984C153C5340FA28B2217F0346C0CE0A257ADB3D5340BEE6287052F545C01AA33BF2DF3F5340F25A937BB7D244C009CB92853C69534049A5CD2EAE0644C03857A7D84686534063B4EEABC4F943C08D992E511D88534034A2B437F8EA43C0F54A5986388A53409C72BC6BC5E843C0920AAB5C038A534050AF9465883342C0363B85F6B5605340D43E0032881142C02A5884BF7F5D5340F4FDD478E90641C007F01648504453409C6F1F2DEA1541C00EA6095E6A4253404E4E9C88873342C06DC6E4C7471E53403EDF52396E3443C0DC9EAF2DC7FD524044696FF0854143C032772D211FFC52400A4C8422590E46C0B656FB86F03B5340', '4', '2004-10-23 10:23:54');
INSERT INTO "public"."my_data" ("geom" , "id", "datetime") VALUES ('0103000020E6100000010000000D000000BBE9944235C347C0EBF06E7961EE52406ADE718A8EC447C0D122DBF97EEE5240942D6301ECB947C05B59871F60F0524086CAEEF61AAE47C0BDEF3BBB76F252400A4C8422590E46C0B656FB86F03B5340FA28B2217F0346C0CE0A257ADB3D534057EC2FBB27F745C02B1895D409405340BEE6287052F545C01AA33BF2DF3F53401D386744692743C07958A835CDFF52403EDF52396E3443C0DC9EAF2DC7FD5240B9E39237FD0645C0574B4E2543B552400AD7A3703D1245C03A234A7B83B35240BBE9944235C347C0EBF06E7961EE5240', '5', '2004-10-24 10:23:54');
COMMIT;
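
The script above creates public.my_data with a 2D GEOMETRY column in EPSG:4326, a GiST index, and six polygon rows carrying id and datetime attributes. A quick sanity check of the ingest from Python, reusing the DSN the CI workflow already passes to pypgstac and psql (a sketch, not part of the PR):

# check_my_data.py -- sketch: verify the my_data.sql ingest (not part of this PR)
import psycopg

# Same DSN the CI workflow uses for pypgstac and psql.
dsn = "postgresql://username:[email protected]:5439/postgis"

with psycopg.connect(dsn) as conn:
    row = conn.execute(
        "SELECT count(*), min(datetime), max(datetime) FROM public.my_data;"
    ).fetchone()
    print(row)  # expected: 6 rows spanning 2004-10-19 to 2004-10-24
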
1 change: 1 addition & 0 deletions .github/workflows/data/noaa-emergency-response.json
@@ -0,0 +1 @@
{"id":"noaa-emergency-response", "title": "NOAA Emergency Response Imagery", "description":"NOAA Emergency Response Imagery hosted on AWS Public Dataset.","stac_version":"1.0.0","license":"public-domain","links":[],"extent":{"spatial":{"bbox":[[-180,-90,180,90]]},"temporal":{"interval":[["2005-01-01T00:00:00Z",null]]}}}