diff --git a/.gitignore b/.gitignore index 378283a07..9f92262f2 100644 --- a/.gitignore +++ b/.gitignore @@ -54,4 +54,6 @@ report.pdf report.xml # infracost .infracost/ -__pycache__/ \ No newline at end of file +__pycache__/ +tmp_schema_test/ +tmp_schema_test.zip diff --git a/datasets/bathy/package/BathyDataFrame.cpp b/datasets/bathy/package/BathyDataFrame.cpp index f6575cd9b..b1cdfaee4 100644 --- a/datasets/bathy/package/BathyDataFrame.cpp +++ b/datasets/bathy/package/BathyDataFrame.cpp @@ -106,41 +106,41 @@ int BathyDataFrame::luaCreate (lua_State* L) BathyDataFrame::BathyDataFrame (lua_State* L, const char* beam_str, BathyFields* _parms, H5Object* _hdf03, H5Object* _hdf09, const char* rqstq_name, BathyMask* _mask): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"index_ph", &index_ph}, - {"index_seg", &index_seg}, - {"lat_ph", &lat_ph}, - {"lon_ph", &lon_ph}, - {"x_ph", &x_ph}, - {"y_ph", &y_ph}, - {"x_atc", &x_atc}, - {"y_atc", &y_atc}, - {"surface_h", &surface_h}, - {"ortho_h", &ortho_h}, - {"ellipse_h", &ellipse_h}, - {"sigma_thu", &sigma_thu}, - {"sigma_tvu", &sigma_tvu}, - {"processing_flags", &processing_flags}, - {"max_signal_conf", &max_signal_conf}, - {"quality_ph", &quality_ph}, - {"class_ph", &class_ph}, - {"geoid_corr_h", &geoid_corr_h}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"index_ph", &index_ph, "photon index"}, + {"index_seg", &index_seg, "segment index"}, + {"lat_ph", &lat_ph, "photon latitude"}, + {"lon_ph", &lon_ph, "photon longitude"}, + {"x_ph", &x_ph, "photon UTM easting (m)"}, + {"y_ph", &y_ph, "photon UTM northing (m)"}, + {"x_atc", &x_atc, "along-track distance (m)"}, + {"y_atc", &y_atc, "across-track distance (m)"}, + {"surface_h", &surface_h, "sea surface height (m)"}, + {"ortho_h", &ortho_h, "orthometric height (m)"}, + {"ellipse_h", &ellipse_h, "ellipsoidal height (m)"}, + {"sigma_thu", &sigma_thu, "total horizontal uncertainty (m)"}, + {"sigma_tvu", &sigma_tvu, "total vertical uncertainty (m)"}, 
+ {"processing_flags", &processing_flags, "processing flags"}, + {"max_signal_conf", &max_signal_conf, "maximum signal confidence"}, + {"quality_ph", &quality_ph, "photon quality flag"}, + {"class_ph", &class_ph, "photon classification"}, + {"geoid_corr_h", &geoid_corr_h, "geoid correction (m)"}, // temporary columns for python code - {"refracted_dZ", &refracted_dZ}, - {"refracted_lat", &refracted_lat}, - {"refracted_lon", &refracted_lon}, - {"subaqueous_sigma_thu", &subaqueous_sigma_thu}, - {"subaqueous_sigma_tvu", &subaqueous_sigma_tvu} + {"refracted_dZ", &refracted_dZ, "refraction depth correction (m)"}, + {"refracted_lat", &refracted_lat, "refraction-corrected latitude"}, + {"refracted_lon", &refracted_lon, "refraction-corrected longitude"}, + {"subaqueous_sigma_thu", &subaqueous_sigma_thu, "subaqueous horizontal uncertainty (m)"}, + {"subaqueous_sigma_tvu", &subaqueous_sigma_tvu, "subaqueous vertical uncertainty (m)"} }, { - {"spot", &spot}, - {"beam", &beam}, - {"track", &track}, - {"pair", &pair}, - {"utm_zone", &utm_zone}, - {"utm_is_north", &utm_is_north}, - {"bounding_polygon_lat",&bounding_polygon_lat}, - {"bounding_polygon_lon",&bounding_polygon_lon} + {"spot", &spot, "spot number (1-6)"}, + {"beam", &beam, "beam name"}, + {"track", &track, "track number"}, + {"pair", &pair, "beam pair"}, + {"utm_zone", &utm_zone, "UTM zone number"}, + {"utm_is_north", &utm_is_north, "UTM hemisphere (1=north, 0=south)"}, + {"bounding_polygon_lat",&bounding_polygon_lat, "bounding polygon latitudes"}, + {"bounding_polygon_lon",&bounding_polygon_lon, "bounding polygon longitudes"} }, Icesat2Fields::defaultEGM(_parms->granuleFields.version.value)), beam(beam_str), diff --git a/datasets/casals/package/Casals1bDataFrame.cpp b/datasets/casals/package/Casals1bDataFrame.cpp index 36e67e4d1..3d6abf62a 100644 --- a/datasets/casals/package/Casals1bDataFrame.cpp +++ b/datasets/casals/package/Casals1bDataFrame.cpp @@ -61,6 +61,9 @@ const struct luaL_Reg 
Casals1bDataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Casals1bDataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Casals1bDataFrame(L, NULL, NULL, NULL)); + CasalsFields* _parms = NULL; H5Object* _hdf1b = NULL; @@ -89,33 +92,33 @@ int Casals1bDataFrame::luaCreate (lua_State* L) Casals1bDataFrame::Casals1bDataFrame (lua_State* L, CasalsFields* _parms, H5Object* _hdf1b, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"refh", &refh}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude"}, + {"longitude", &longitude, "longitude"}, + {"refh", &refh, "reference height (m)"}, }, { - {"granule", &granule} + {"granule", &granule, "source granule name"} }, - CasalsFields::crsITRF2020()), - granule(_hdf1b->name, META_SOURCE_ID), + _parms ? CasalsFields::crsITRF2020() : NULL), + granule(_hdf1b ? _hdf1b->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), outQ(NULL), parms(_parms), hdf1b(_hdf1b), - dfKey(1) + dfKey(_parms ? 
1 : 0) { - assert(_parms); - assert(_hdf1b); + /* Call Parent Class Initialization of GeoColumns */ + populateGeoColumns(); + + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; /* Setup Output Queue (for messages) */ if(outq_name) outQ = new Publisher(outq_name); - /* Call Parent Class Initialization of GeoColumns */ - populateGeoColumns(); - /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); @@ -132,8 +135,8 @@ Casals1bDataFrame::~Casals1bDataFrame (void) active.store(false); delete readerPid; delete outQ; - parms->releaseLuaObject(); - hdf1b->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf1b) hdf1b->releaseLuaObject(); } /*---------------------------------------------------------------------------- diff --git a/datasets/gedi/package/Gedi01bDataFrame.cpp b/datasets/gedi/package/Gedi01bDataFrame.cpp index b3de1f94f..a9d378b2c 100644 --- a/datasets/gedi/package/Gedi01bDataFrame.cpp +++ b/datasets/gedi/package/Gedi01bDataFrame.cpp @@ -57,6 +57,9 @@ const struct luaL_Reg Gedi01bDataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Gedi01bDataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Gedi01bDataFrame(L, NULL, NULL, NULL, NULL)); + GediFields* _parms = NULL; H5Object* _hdf01b = NULL; @@ -86,18 +89,18 @@ int Gedi01bDataFrame::luaCreate (lua_State* L) Gedi01bDataFrame::Gedi01bDataFrame (lua_State* L, const char* beam_str, GediFields* _parms, H5Object* _hdf01b, const char* outq_name): GediDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"shot_number", &shot_number}, - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"elevation_start", &elevation_start}, - {"elevation_stop", &elevation_stop}, - {"solar_elevation", &solar_elevation}, - {"tx_size", &tx_size}, - {"rx_size", &rx_size}, - {"flags", &flags}, - {"tx_waveform", &tx_waveform}, - {"rx_waveform", &rx_waveform} 
+ {"shot_number", &shot_number, "unique shot identifier"}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude"}, + {"longitude", &longitude, "longitude"}, + {"elevation_start", &elevation_start, "waveform start elevation (m)"}, + {"elevation_stop", &elevation_stop, "waveform stop elevation (m)"}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)"}, + {"tx_size", &tx_size, "transmit waveform sample count"}, + {"rx_size", &rx_size, "receive waveform sample count"}, + {"flags", &flags, "quality flags"}, + {"tx_waveform", &tx_waveform, "transmit waveform"}, + {"rx_waveform", &rx_waveform, "receive waveform"} }, _parms, _hdf01b, @@ -107,6 +110,9 @@ Gedi01bDataFrame::Gedi01bDataFrame (lua_State* L, const char* beam_str, GediFiel /* Call Parent Class Initialization of GeoColumns */ populateGeoColumns(); + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; + /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); diff --git a/datasets/gedi/package/Gedi02aDataFrame.cpp b/datasets/gedi/package/Gedi02aDataFrame.cpp index 3e20f410e..3d4756060 100644 --- a/datasets/gedi/package/Gedi02aDataFrame.cpp +++ b/datasets/gedi/package/Gedi02aDataFrame.cpp @@ -53,6 +53,9 @@ const struct luaL_Reg Gedi02aDataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Gedi02aDataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Gedi02aDataFrame(L, NULL, NULL, NULL, NULL)); + GediFields* _parms = NULL; H5Object* _hdf02a = NULL; @@ -82,15 +85,15 @@ int Gedi02aDataFrame::luaCreate (lua_State* L) Gedi02aDataFrame::Gedi02aDataFrame (lua_State* L, const char* beam_str, GediFields* _parms, H5Object* _hdf02a, const char* outq_name): GediDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"shot_number", &shot_number}, - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"elevation_lm", 
&elevation_lm}, - {"elevation_hr", &elevation_hr}, - {"solar_elevation", &solar_elevation}, - {"sensitivity", &sensitivity}, - {"flags", &flags} + {"shot_number", &shot_number, "unique shot identifier"}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude"}, + {"longitude", &longitude, "longitude"}, + {"elevation_lm", &elevation_lm, "elevation lowest mode (m)"}, + {"elevation_hr", &elevation_hr, "elevation highest return (m)"}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)"}, + {"sensitivity", &sensitivity, "beam sensitivity"}, + {"flags", &flags, "quality flags"} }, _parms, _hdf02a, @@ -100,6 +103,9 @@ Gedi02aDataFrame::Gedi02aDataFrame (lua_State* L, const char* beam_str, GediFiel /* Call Parent Class Initialization of GeoColumns */ populateGeoColumns(); + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; + /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); diff --git a/datasets/gedi/package/Gedi04aDataFrame.cpp b/datasets/gedi/package/Gedi04aDataFrame.cpp index 7cf85a393..39f27f9ec 100644 --- a/datasets/gedi/package/Gedi04aDataFrame.cpp +++ b/datasets/gedi/package/Gedi04aDataFrame.cpp @@ -53,6 +53,9 @@ const struct luaL_Reg Gedi04aDataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Gedi04aDataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Gedi04aDataFrame(L, NULL, NULL, NULL, NULL)); + GediFields* _parms = NULL; H5Object* _hdf04a = NULL; @@ -82,15 +85,15 @@ int Gedi04aDataFrame::luaCreate (lua_State* L) Gedi04aDataFrame::Gedi04aDataFrame (lua_State* L, const char* beam_str, GediFields* _parms, H5Object* _hdf04a, const char* outq_name): GediDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"shot_number", &shot_number}, - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"agbd", &agbd}, - {"elevation", &elevation}, - 
{"solar_elevation", &solar_elevation}, - {"sensitivity", &sensitivity}, - {"flags", &flags} + {"shot_number", &shot_number, "unique shot identifier"}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude"}, + {"longitude", &longitude, "longitude"}, + {"agbd", &agbd, "above ground biomass density (Mg/ha)"}, + {"elevation", &elevation, "elevation (m)"}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)"}, + {"sensitivity", &sensitivity, "beam sensitivity"}, + {"flags", &flags, "quality flags"} }, _parms, _hdf04a, @@ -100,6 +103,9 @@ Gedi04aDataFrame::Gedi04aDataFrame (lua_State* L, const char* beam_str, GediFiel /* Call Parent Class Initialization of GeoColumns */ populateGeoColumns(); + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; + /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); diff --git a/datasets/gedi/package/GediDataFrame.cpp b/datasets/gedi/package/GediDataFrame.cpp index ef94d3cb7..191963b52 100644 --- a/datasets/gedi/package/GediDataFrame.cpp +++ b/datasets/gedi/package/GediDataFrame.cpp @@ -51,28 +51,28 @@ GediFields* _parms, H5Object* _hdf, const char* beam_str, const char* outq_name): GeoDataFrame(L, meta_name, meta_table, column_list, { - {"beam", &beam}, - {"orbit", &orbit}, - {"track", &track}, - {"granule", &granule} + {"beam", &beam, "beam number"}, + {"orbit", &orbit, "orbit number"}, + {"track", &track, "track number"}, + {"granule", &granule, "source granule name"} }, getCRS()), beam(0, META_COLUMN), - orbit(static_cast(_parms->granule_fields.orbit.value), META_COLUMN), - track(static_cast(_parms->granule_fields.track.value), META_COLUMN), - granule(_hdf->name, META_SOURCE_ID), + orbit(_parms ? static_cast(_parms->granule_fields.orbit.value) : 0, META_COLUMN), + track(_parms ? static_cast(_parms->granule_fields.track.value) : 0, META_COLUMN), + granule(_hdf ? 
_hdf->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), outQ(NULL), parms(_parms), hdf(_hdf), dfKey(0), - beamStr(StringLib::duplicate(beam_str)), + beamStr(beam_str ? StringLib::duplicate(beam_str) : NULL), group{0} { - assert(_parms); - assert(_hdf); + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; /* Resolve Beam */ const int beam_index = beamIndexFromString(beam_str); diff --git a/datasets/icesat2/package/Atl03DataFrame.cpp b/datasets/icesat2/package/Atl03DataFrame.cpp index 5af959ad7..079636dfe 100644 --- a/datasets/icesat2/package/Atl03DataFrame.cpp +++ b/datasets/icesat2/package/Atl03DataFrame.cpp @@ -52,6 +52,7 @@ const struct luaL_Reg Atl03DataFrame::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * ATL03 READER CLASS ******************************************************************************/ @@ -61,6 +62,12 @@ const struct luaL_Reg Atl03DataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Atl03DataFrame::luaCreate (lua_State* L) { + /* Schema-only: no arguments creates a lightweight instance for schema registration */ + if(lua_gettop(L) == 0) + { + return createLuaObject(L, new Atl03DataFrame(L, NULL, NULL, NULL, NULL, NULL, NULL)); + } + Icesat2Fields* _parms = NULL; H5Object* _hdf03 = NULL; H5Object* _hdf08 = NULL; @@ -96,86 +103,69 @@ int Atl03DataFrame::luaCreate (lua_State* L) Atl03DataFrame::Atl03DataFrame (lua_State* L, const char* beam_str, Icesat2Fields* _parms, H5Object* _hdf03, H5Object* _hdf08, H5Object* _hdf24, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"segment_id", &segment_id}, - {"x_atc", &x_atc}, - {"y_atc", &y_atc}, - {"height", &height}, - 
{"solar_elevation", &solar_elevation}, - {"background_rate", &background_rate}, - {"spacecraft_velocity", &spacecraft_velocity}, - {"atl03_cnf", &atl03_cnf}, - {"quality_ph", &quality_ph}, - {"ph_index", &ph_index}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude (EPSG:7912)"}, + {"longitude", &longitude, "longitude (EPSG:7912)"}, + {"segment_id", &segment_id, "segment identifier"}, + {"x_atc", &x_atc, "along-track distance (m)"}, + {"y_atc", &y_atc, "across-track distance (m)"}, + {"height", &height, "photon height (m)"}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)"}, + {"background_rate", &background_rate, "background photon rate (MHz)"}, + {"spacecraft_velocity", &spacecraft_velocity, "spacecraft velocity (m/s)"}, + {"atl03_cnf", &atl03_cnf, "ATL03 signal confidence"}, + {"quality_ph", &quality_ph, "photon quality flag"}, + {"ph_index", &ph_index, "photon index within segment"}, }, { - {"spot", &spot}, - {"cycle", &cycle}, - {"region", ®ion}, - {"rgt", &rgt}, - {"gt", >}, - {"granule", &granule} + {"spot", &spot, "spot number (1-6)"}, + {"cycle", &cycle, "orbital cycle"}, + {"region", ®ion, "region number"}, + {"rgt", &rgt, "reference ground track"}, + {"gt", >, "ground track"}, + {"granule", &granule, "source granule name"} }, - Icesat2Fields::defaultITRF(_parms->granuleFields.version.value)), + _parms ? Icesat2Fields::defaultITRF(_parms->granuleFields.version.value) : NULL), spot(0, META_COLUMN), - cycle(_parms->granuleFields.cycle.value, META_COLUMN), - region(_parms->granuleFields.region.value, META_COLUMN), - rgt(_parms->granuleFields.rgt.value, META_COLUMN), + cycle(_parms ? _parms->granuleFields.cycle.value : 0, META_COLUMN), + region(_parms ? _parms->granuleFields.region.value : 0, META_COLUMN), + rgt(_parms ? _parms->granuleFields.rgt.value : 0, META_COLUMN), gt(0, META_COLUMN), - granule(_hdf03->name, META_SOURCE_ID), + granule(_hdf03 ? 
_hdf03->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), - signalConfColIndex(H5Coro::ALL_COLS), - beam(FString("%s", beam_str).c_str(true)), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), + signalConfColIndex(_parms ? H5Coro::ALL_COLS : 0), + beam(beam_str ? FString("%s", beam_str).c_str(true) : NULL), outQ(NULL), parms(_parms), hdf03(_hdf03), hdf08(_hdf08), hdf24(_hdf24), - usePodppd(parms->podppdMask.value != 0x00), - useYapc006(parms->stages[Icesat2Fields::STAGE_YAPC] && (parms->yapc.version.value == 0) && (parms->granuleFields.version.value == 6)), - useYapc007(parms->stages[Icesat2Fields::STAGE_YAPC] && (parms->yapc.version.value == 0) && (parms->granuleFields.version.value >= 7)), - useGeoid(parms->datum.value == MathLib::EGM08) + usePodppd(_parms ? (parms->podppdMask.value != 0x00) : false), + useYapc006(_parms ? (parms->stages[Icesat2Fields::STAGE_YAPC] && (parms->yapc.version.value == 0) && (parms->granuleFields.version.value == 6)) : false), + useYapc007(_parms ? (parms->stages[Icesat2Fields::STAGE_YAPC] && (parms->yapc.version.value == 0) && (parms->granuleFields.version.value >= 7)) : false), + useGeoid(_parms ? (parms->datum.value == MathLib::EGM08) : false) { - assert(_parms); - assert(_hdf03); - - /* Set Optional PhoREAL Columns */ - if(parms->stages[Icesat2Fields::STAGE_PHOREAL]) - { - addColumn("relief", &relief, false); - addColumn("landcover", &landcover, false); - addColumn("snowcover", &snowcover, false); - } - - /* Set Optional YAPC Columns */ - if(parms->stages[Icesat2Fields::STAGE_YAPC]) - { - addColumn("yapc_score", &yapc_score, false); - } + /* Register conditional columns (enabled=false for schema-only mode) */ + const bool schema_only = (_parms == NULL); + addColumn("relief", &relief, false, "PhoREAL relief", "stages.phoreal", schema_only ? 
false : parms->stages[Icesat2Fields::STAGE_PHOREAL]); + addColumn("landcover", &landcover, false, "land cover classification", "stages.phoreal", schema_only ? false : parms->stages[Icesat2Fields::STAGE_PHOREAL]); + addColumn("snowcover", &snowcover, false, "snow cover classification", "stages.phoreal", schema_only ? false : parms->stages[Icesat2Fields::STAGE_PHOREAL]); + addColumn("yapc_score", &yapc_score, false, "YAPC score", "stages.yapc", schema_only ? false : parms->stages[Icesat2Fields::STAGE_YAPC]); + addColumn("atl08_class", &atl08_class, false, "ATL08 photon classification", "stages.atl08", schema_only ? false : parms->stages[Icesat2Fields::STAGE_ATL08]); + addColumn("atl24_class", &atl24_class, false, "ATL24 photon classification", "stages.atl24", schema_only ? false : parms->stages[Icesat2Fields::STAGE_ATL24]); + addColumn("atl24_confidence", &atl24_confidence, false, "ATL24 classification confidence", "stages.atl24", schema_only ? false : parms->stages[Icesat2Fields::STAGE_ATL24]); - /* Set Optional ATL08 Columns */ - if(parms->stages[Icesat2Fields::STAGE_ATL08]) - { - addColumn("atl08_class", &atl08_class, false); - } + /* Call Parent Class Initialization of GeoColumns */ + populateGeoColumns(); - /* Set Optional ATL24 Columns */ - if(parms->stages[Icesat2Fields::STAGE_ATL24]) - { - addColumn("atl24_class", &atl24_class, false); - addColumn("atl24_confidence", &atl24_confidence, false); - } + /* Schema-only: skip all runtime initialization */ + if(schema_only) return; /* Set CRS */ if(useGeoid) crs = Icesat2Fields::defaultEGM(_parms->granuleFields.version.value); - /* Call Parent Class Initialization of GeoColumns */ - populateGeoColumns(); - /* Set Signal Confidence Index */ if(parms->surfaceType != Icesat2Fields::SRT_DYNAMIC) { @@ -205,8 +195,8 @@ Atl03DataFrame::~Atl03DataFrame (void) delete readerPid; delete [] beam; delete outQ; - parms->releaseLuaObject(); - hdf03->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf03) 
hdf03->releaseLuaObject(); if(hdf08) hdf08->releaseLuaObject(); if(hdf24) hdf24->releaseLuaObject(); } diff --git a/datasets/icesat2/package/Atl06DataFrame.cpp b/datasets/icesat2/package/Atl06DataFrame.cpp index 1db6becd8..ed1b8f602 100644 --- a/datasets/icesat2/package/Atl06DataFrame.cpp +++ b/datasets/icesat2/package/Atl06DataFrame.cpp @@ -44,6 +44,7 @@ const struct luaL_Reg Atl06DataFrame::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * ATL06 DATAFRAME CLASS ******************************************************************************/ @@ -53,6 +54,9 @@ const struct luaL_Reg Atl06DataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Atl06DataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Atl06DataFrame(L, NULL, NULL, NULL, NULL)); + Icesat2Fields* _parms = NULL; H5Object* _hdf06 = NULL; @@ -82,52 +86,55 @@ int Atl06DataFrame::luaCreate (lua_State* L) Atl06DataFrame::Atl06DataFrame (lua_State* L, const char* beam_str, Icesat2Fields* _parms, H5Object* _hdf06, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"x_atc", &x_atc}, - {"y_atc", &y_atc}, - {"h_li", &h_li}, - {"h_li_sigma", &h_li_sigma}, - {"sigma_geo_h", &sigma_geo_h}, - {"atl06_quality_summary", &atl06_quality_summary}, - {"segment_id", &segment_id}, - {"seg_azimuth", &seg_azimuth}, - {"dh_fit_dx", &dh_fit_dx}, - {"h_robust_sprd", &h_robust_sprd}, - {"w_surface_window_final", &w_surface_window_final}, - {"bsnow_conf", &bsnow_conf}, - {"bsnow_h", &bsnow_h}, - {"r_eff", &r_eff}, - {"tide_ocean", &tide_ocean}, - {"n_fit_photons", &n_fit_photons} + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude (EPSG:7912)"}, + {"longitude", &longitude, "longitude (EPSG:7912)"}, + {"x_atc", &x_atc, "along-track 
distance (m)"}, + {"y_atc", &y_atc, "across-track distance (m)"}, + {"h_li", &h_li, "land ice height (m)"}, + {"h_li_sigma", &h_li_sigma, "land ice height uncertainty (m)"}, + {"sigma_geo_h", &sigma_geo_h, "total geolocation uncertainty (m)"}, + {"atl06_quality_summary", &atl06_quality_summary, "quality summary flag"}, + {"segment_id", &segment_id, "segment identifier"}, + {"seg_azimuth", &seg_azimuth, "segment azimuth (deg)"}, + {"dh_fit_dx", &dh_fit_dx, "along-track slope"}, + {"h_robust_sprd", &h_robust_sprd, "robust spread of heights (m)"}, + {"w_surface_window_final", &w_surface_window_final, "final surface window width (m)"}, + {"bsnow_conf", &bsnow_conf, "blowing snow confidence"}, + {"bsnow_h", &bsnow_h, "blowing snow layer height (m)"}, + {"r_eff", &r_eff, "effective reflectance"}, + {"tide_ocean", &tide_ocean, "ocean tide correction (m)"}, + {"n_fit_photons", &n_fit_photons, "number of fit photons"} }, { - {"spot", &spot}, - {"cycle", &cycle}, - {"region", ®ion}, - {"rgt", &rgt}, - {"gt", >}, - {"granule", &granule} + {"spot", &spot, "spot number (1-6)"}, + {"cycle", &cycle, "orbital cycle"}, + {"region", ®ion, "region number"}, + {"rgt", &rgt, "reference ground track"}, + {"gt", >, "ground track"}, + {"granule", &granule, "source granule name"} }, - Icesat2Fields::defaultITRF(_parms->granuleFields.version.value)), + _parms ? Icesat2Fields::defaultITRF(_parms->granuleFields.version.value) : NULL), spot(0, META_COLUMN), - cycle(_parms->granuleFields.cycle.value, META_COLUMN), - region(_parms->granuleFields.region.value, META_COLUMN), - rgt(_parms->granuleFields.rgt.value, META_COLUMN), + cycle(_parms ? _parms->granuleFields.cycle.value : 0, META_COLUMN), + region(_parms ? _parms->granuleFields.region.value : 0, META_COLUMN), + rgt(_parms ? _parms->granuleFields.rgt.value : 0, META_COLUMN), gt(0, META_COLUMN), - granule(_hdf06->name, META_SOURCE_ID), + granule(_hdf06 ? 
_hdf06->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), outQ(NULL), parms(_parms), hdf06(_hdf06), dfKey(0), - beam(StringLib::duplicate(beam_str)) + beam(beam_str ? StringLib::duplicate(beam_str) : NULL) { - assert(_parms); - assert(_hdf06); + /* Call Parent Class Initialization of GeoColumns */ + populateGeoColumns(); + + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; /* Calculate Key */ dfKey = Icesat2Fields::calculateBeamKey(beam); @@ -135,9 +142,6 @@ Atl06DataFrame::Atl06DataFrame (lua_State* L, const char* beam_str, Icesat2Field /* Optional Output Queue (for messages) */ if(outq_name) outQ = new Publisher(outq_name); - /* Call Parent Class Initialization of GeoColumns */ - populateGeoColumns(); - /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); @@ -155,8 +159,8 @@ Atl06DataFrame::~Atl06DataFrame (void) delete readerPid; delete [] beam; delete outQ; - parms->releaseLuaObject(); - hdf06->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf06) hdf06->releaseLuaObject(); } /*---------------------------------------------------------------------------- diff --git a/datasets/icesat2/package/Atl08DataFrame.cpp b/datasets/icesat2/package/Atl08DataFrame.cpp index 837482ca4..e0c601124 100644 --- a/datasets/icesat2/package/Atl08DataFrame.cpp +++ b/datasets/icesat2/package/Atl08DataFrame.cpp @@ -44,6 +44,7 @@ const struct luaL_Reg Atl08DataFrame::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * ATL08 DATAFRAME CLASS ******************************************************************************/ @@ -53,6 +54,9 @@ const struct luaL_Reg Atl08DataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Atl08DataFrame::luaCreate (lua_State* L) { + 
if(lua_gettop(L) == 0) + return createLuaObject(L, new Atl08DataFrame(L, NULL, NULL, NULL, NULL)); + Icesat2Fields* _parms = NULL; H5Object* _hdf08 = NULL; @@ -82,54 +86,61 @@ int Atl08DataFrame::luaCreate (lua_State* L) Atl08DataFrame::Atl08DataFrame (lua_State* L, const char* beam_str, Icesat2Fields* _parms, H5Object* _hdf08, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"segment_id_beg", &segment_id_beg}, - {"segment_landcover", &segment_landcover}, - {"segment_snowcover", &segment_snowcover}, - {"n_seg_ph", &n_seg_ph}, - {"solar_elevation", &solar_elevation}, - {"terrain_slope", &terrain_slope}, - {"n_te_photons", &n_te_photons}, - {"h_te_uncertainty", &h_te_uncertainty}, - {"h_te_median", &h_te_median}, - {"h_canopy", &h_canopy}, - {"h_canopy_uncertainty", &h_canopy_uncertainty}, - {"segment_cover", &segment_cover}, - {"n_ca_photons", &n_ca_photons}, - {"h_max_canopy", &h_max_canopy}, - {"h_min_canopy", &h_min_canopy}, - {"h_mean_canopy", &h_mean_canopy}, - {"canopy_openness", &canopy_openness}, - {"canopy_h_metrics", &canopy_h_metrics} + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude (EPSG:7912)"}, + {"longitude", &longitude, "longitude (EPSG:7912)"}, + {"segment_id_beg", &segment_id_beg, "beginning segment identifier"}, + {"segment_landcover", &segment_landcover, "land cover classification"}, + {"segment_snowcover", &segment_snowcover, "snow cover classification"}, + {"n_seg_ph", &n_seg_ph, "number of photons in segment"}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)"}, + {"terrain_slope", &terrain_slope, "terrain slope (deg)"}, + {"n_te_photons", &n_te_photons, "number of terrain photons"}, + {"h_te_uncertainty", &h_te_uncertainty, "terrain height uncertainty (m)"}, + {"h_te_median", &h_te_median, "median terrain height (m)"}, + {"h_canopy", &h_canopy, "canopy height (m)"}, + 
{"h_canopy_uncertainty", &h_canopy_uncertainty, "canopy height uncertainty (m)"}, + {"segment_cover", &segment_cover, "canopy cover fraction"}, + {"n_ca_photons", &n_ca_photons, "number of canopy photons"}, + {"h_max_canopy", &h_max_canopy, "maximum canopy height (m)"}, + {"h_min_canopy", &h_min_canopy, "minimum canopy height (m)"}, + {"h_mean_canopy", &h_mean_canopy, "mean canopy height (m)"}, + {"canopy_openness", &canopy_openness, "canopy openness"}, + {"canopy_h_metrics", &canopy_h_metrics, "canopy height percentile metrics"} }, { - {"spot", &spot}, - {"cycle", &cycle}, - {"region", ®ion}, - {"rgt", &rgt}, - {"gt", >}, - {"granule", &granule} + {"spot", &spot, "spot number (1-6)"}, + {"cycle", &cycle, "orbital cycle"}, + {"region", ®ion, "region number"}, + {"rgt", &rgt, "reference ground track"}, + {"gt", >, "ground track"}, + {"granule", &granule, "source granule name"} }, - Icesat2Fields::defaultITRF(_parms->granuleFields.version.value)), + _parms ? Icesat2Fields::defaultITRF(_parms->granuleFields.version.value) : NULL), spot(0, META_COLUMN), - cycle(_parms->granuleFields.cycle.value, META_COLUMN), - region(_parms->granuleFields.region.value, META_COLUMN), - rgt(_parms->granuleFields.rgt.value, META_COLUMN), + cycle(_parms ? _parms->granuleFields.cycle.value : 0, META_COLUMN), + region(_parms ? _parms->granuleFields.region.value : 0, META_COLUMN), + rgt(_parms ? _parms->granuleFields.rgt.value : 0, META_COLUMN), gt(0, META_COLUMN), - granule(_hdf08->name, META_SOURCE_ID), + granule(_hdf08 ? _hdf08->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), outQ(NULL), parms(_parms), hdf08(_hdf08), dfKey(0), - beam(StringLib::duplicate(beam_str)) + beam(beam_str ? 
StringLib::duplicate(beam_str) : NULL) { - assert(_parms); - assert(_hdf08); + const bool schema_only = (_parms == NULL); + addColumn("te_quality_score", &te_quality_score, false, "terrain quality score", "phoreal.te_quality_filter", schema_only ? false : parms->phoreal.te_quality_filter_provided); + addColumn("can_quality_score", &can_quality_score, false, "canopy quality score", "phoreal.can_quality_filter", schema_only ? false : parms->phoreal.can_quality_filter_provided); + + /* Call Parent Class Initialization of GeoColumns */ + populateGeoColumns(); + + /* Schema-only: skip all runtime initialization */ + if(schema_only) return; /* Calculate Key */ dfKey = Icesat2Fields::calculateBeamKey(beam); @@ -137,19 +148,6 @@ Atl08DataFrame::Atl08DataFrame (lua_State* L, const char* beam_str, Icesat2Field /* Optional Output Queue (for messages) */ if(outq_name) outQ = new Publisher(outq_name); - /* Optional Quality Score Columns */ - if(parms->phoreal.te_quality_filter_provided) - { - addColumn("te_quality_score", &te_quality_score, false); - } - if(parms->phoreal.can_quality_filter_provided) - { - addColumn("can_quality_score", &can_quality_score, false); - } - - /* Call Parent Class Initialization of GeoColumns */ - populateGeoColumns(); - /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId(traceId); @@ -167,8 +165,8 @@ Atl08DataFrame::~Atl08DataFrame (void) delete readerPid; delete [] beam; delete outQ; - parms->releaseLuaObject(); - hdf08->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf08) hdf08->releaseLuaObject(); } /*---------------------------------------------------------------------------- diff --git a/datasets/icesat2/package/Atl13DataFrame.cpp b/datasets/icesat2/package/Atl13DataFrame.cpp index 678a7d734..7c1bc4b74 100644 --- a/datasets/icesat2/package/Atl13DataFrame.cpp +++ b/datasets/icesat2/package/Atl13DataFrame.cpp @@ -52,6 +52,7 @@ const struct luaL_Reg Atl13DataFrame::LUA_META_TABLE[] = { {NULL, NULL} }; + 
/****************************************************************************** * ATL03 READER CLASS ******************************************************************************/ @@ -61,6 +62,9 @@ const struct luaL_Reg Atl13DataFrame::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int Atl13DataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Atl13DataFrame(L, NULL, NULL, NULL, NULL)); + Icesat2Fields* _parms = NULL; H5Object* _hdf13 = NULL; @@ -90,42 +94,43 @@ int Atl13DataFrame::luaCreate (lua_State* L) Atl13DataFrame::Atl13DataFrame (lua_State* L, const char* beam_str, Icesat2Fields* _parms, H5Object* _hdf13, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"time_ns", &time_ns}, - {"latitude", &latitude}, - {"longitude", &longitude}, - {"segment_id_beg", &segment_id_beg}, - {"ht_ortho", &ht_ortho}, - {"ht_water_surf", &ht_water_surf}, - {"stdev_water_surf", &stdev_water_surf}, - {"water_depth", &water_depth}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"latitude", &latitude, "latitude (EPSG:7912)"}, + {"longitude", &longitude, "longitude (EPSG:7912)"}, + {"segment_id_beg", &segment_id_beg, "beginning segment identifier"}, + {"ht_ortho", &ht_ortho, "orthometric height (m)"}, + {"ht_water_surf", &ht_water_surf, "water surface height (m)"}, + {"stdev_water_surf", &stdev_water_surf, "water surface height std dev (m)"}, + {"water_depth", &water_depth, "water depth (m)"}, }, { - {"spot", &spot}, - {"cycle", &cycle}, - {"rgt", &rgt}, - {"gt", >}, - {"granule", &granule} + {"spot", &spot, "spot number (1-6)"}, + {"cycle", &cycle, "orbital cycle"}, + {"rgt", &rgt, "reference ground track"}, + {"gt", >, "ground track"}, + {"granule", &granule, "source granule name"} }, - Icesat2Fields::defaultEGM(_parms->granuleFields.version.value)), + _parms ? 
Icesat2Fields::defaultEGM(_parms->granuleFields.version.value) : NULL), spot(0, META_COLUMN), - cycle(_parms->granuleFields.cycle.value, META_COLUMN), - rgt(_parms->granuleFields.rgt.value, META_COLUMN), + cycle(_parms ? _parms->granuleFields.cycle.value : 0, META_COLUMN), + rgt(_parms ? _parms->granuleFields.rgt.value : 0, META_COLUMN), gt(0, META_COLUMN), - granule(_hdf13->name, META_SOURCE_ID), + granule(_hdf13 ? _hdf13->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), - beam(FString("%s", beam_str).c_str(true)), + readTimeoutMs(_parms ? _parms->readTimeout.value * 1000 : 0), + beam(beam_str ? FString("%s", beam_str).c_str(true) : NULL), outQ(NULL), parms(_parms), - hdf13(_hdf13) + hdf13(_hdf13), + dfKey(0) { - assert(_parms); - assert(_hdf13); - /* Call Parent Class Initialization of GeoColumns */ populateGeoColumns(); + /* Schema-only: skip all runtime initialization */ + if(!_parms) return; + /* Calculate Key */ dfKey = Icesat2Fields::calculateBeamKey(beam); @@ -149,8 +154,8 @@ Atl13DataFrame::~Atl13DataFrame (void) delete readerPid; delete [] beam; delete outQ; - parms->releaseLuaObject(); - hdf13->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf13) hdf13->releaseLuaObject(); } /*---------------------------------------------------------------------------- diff --git a/datasets/icesat2/package/Atl24DataFrame.cpp b/datasets/icesat2/package/Atl24DataFrame.cpp index 1db04ad34..8a97cd1fe 100644 --- a/datasets/icesat2/package/Atl24DataFrame.cpp +++ b/datasets/icesat2/package/Atl24DataFrame.cpp @@ -52,6 +52,7 @@ const struct luaL_Reg Atl24DataFrame::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * atl24 READER CLASS ******************************************************************************/ @@ -61,6 +62,9 @@ const struct luaL_Reg Atl24DataFrame::LUA_META_TABLE[] = { 
*----------------------------------------------------------------------------*/ int Atl24DataFrame::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + return createLuaObject(L, new Atl24DataFrame(L, NULL, NULL, NULL, NULL)); + Icesat2Fields* _parms = NULL; H5Object* _hdf24 = NULL; @@ -90,49 +94,52 @@ int Atl24DataFrame::luaCreate (lua_State* L) Atl24DataFrame::Atl24DataFrame (lua_State* L, const char* beam_str, Icesat2Fields* _parms, H5Object* _hdf24, const char* outq_name): GeoDataFrame(L, LUA_META_NAME, LUA_META_TABLE, { - {"class_ph", &class_ph}, - {"confidence", &confidence}, - {"time_ns", &time_ns}, - {"lat_ph", &lat_ph}, - {"lon_ph", &lon_ph}, - {"ortho_h", &ortho_h}, - {"surface_h", &surface_h}, - {"x_atc", &x_atc}, - {"y_atc", &y_atc}, + {"class_ph", &class_ph, "photon classification"}, + {"confidence", &confidence, "classification confidence"}, + {"time_ns", &time_ns, "GPS nanoseconds"}, + {"lat_ph", &lat_ph, "photon latitude"}, + {"lon_ph", &lon_ph, "photon longitude"}, + {"ortho_h", &ortho_h, "orthometric height (m)"}, + {"surface_h", &surface_h, "surface height (m)"}, + {"x_atc", &x_atc, "along-track distance (m)"}, + {"y_atc", &y_atc, "across-track distance (m)"}, }, { - {"spot", &spot}, - {"cycle", &cycle}, - {"region", ®ion}, - {"rgt", &rgt}, - {"gt", >}, - {"granule", &granule} + {"spot", &spot, "spot number (1-6)"}, + {"cycle", &cycle, "orbital cycle"}, + {"region", ®ion, "region number"}, + {"rgt", &rgt, "reference ground track"}, + {"gt", >, "ground track"}, + {"granule", &granule, "source granule name"} }, - Icesat2Fields::defaultEGM(_parms->granuleFields.version.value)), - granule(_hdf24->name, META_SOURCE_ID), + _parms ? Icesat2Fields::defaultEGM(_parms->granuleFields.version.value) : NULL), + granule(_hdf24 ? _hdf24->name : "", META_SOURCE_ID), active(false), readerPid(NULL), - readTimeoutMs(_parms->readTimeout.value * 1000), - beam(FString("%s", beam_str).c_str(true)), + readTimeoutMs(_parms ? 
_parms->readTimeout.value * 1000 : 0), + beam(beam_str ? FString("%s", beam_str).c_str(true) : NULL), outQ(NULL), parms(_parms), - hdf24(_hdf24) + hdf24(_hdf24), + dfKey(0) { - assert(_parms); - assert(_hdf24); + /* Register conditional columns (enabled=false for schema-only mode) */ + const bool schema_only = (_parms == NULL); + const bool non_compact = schema_only ? false : !parms->atl24.compact.value; + addColumn("ellipse_h", &ellipse_h, false, "ellipsoidal height (m)", "!atl24.compact", non_compact); + addColumn("invalid_kd", &invalid_kd, false, "invalid Kd flag", "!atl24.compact", non_compact); + addColumn("invalid_wind_speed", &invalid_wind_speed, false, "invalid wind speed flag", "!atl24.compact", non_compact); + addColumn("low_confidence_flag", &low_confidence_flag, false, "low confidence flag", "!atl24.compact", non_compact); + addColumn("night_flag", &night_flag, false, "night flag", "!atl24.compact", non_compact); + addColumn("sensor_depth_exceeded", &sensor_depth_exceeded, false, "sensor depth exceeded flag", "!atl24.compact", non_compact); + addColumn("sigma_thu", &sigma_thu, false, "total horizontal uncertainty (m)", "!atl24.compact", non_compact); + addColumn("sigma_tvu", &sigma_tvu, false, "total vertical uncertainty (m)", "!atl24.compact", non_compact); - /* Set Non-Compact Columns */ - if(!parms->atl24.compact.value) - { - addColumn("ellipse_h", &ellipse_h, false); - addColumn("invalid_kd", &invalid_kd, false); - addColumn("invalid_wind_speed", &invalid_wind_speed, false); - addColumn("low_confidence_flag", &low_confidence_flag, false); - addColumn("night_flag", &night_flag, false); - addColumn("sensor_depth_exceeded", &sensor_depth_exceeded, false); - addColumn("sigma_thu", &sigma_thu, false); - addColumn("sigma_tvu", &sigma_tvu, false); - } + /* Call Parent Class Initialization of GeoColumns */ + populateGeoColumns(); + + /* Schema-only: skip all runtime initialization */ + if(schema_only) return; /* Set MetaData from Parameters */ cycle = 
parms->granuleFields.cycle.value; @@ -145,9 +152,6 @@ Atl24DataFrame::Atl24DataFrame (lua_State* L, const char* beam_str, Icesat2Field /* Setup Output Queue (for messages) */ if(outq_name) outQ = new Publisher(outq_name); - /* Call Parent Class Initialization of GeoColumns */ - populateGeoColumns(); - /* Set Thread Specific Trace ID for H5Coro */ EventLib::stashId (traceId); @@ -165,8 +169,8 @@ Atl24DataFrame::~Atl24DataFrame (void) delete readerPid; delete [] beam; delete outQ; - parms->releaseLuaObject(); - hdf24->releaseLuaObject(); + if(parms) parms->releaseLuaObject(); + if(hdf24) hdf24->releaseLuaObject(); } /*---------------------------------------------------------------------------- diff --git a/datasets/icesat2/package/PhoReal.cpp b/datasets/icesat2/package/PhoReal.cpp index f89ec893d..e7ce712e6 100644 --- a/datasets/icesat2/package/PhoReal.cpp +++ b/datasets/icesat2/package/PhoReal.cpp @@ -57,6 +57,7 @@ const struct luaL_Reg PhoReal::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * METHODS ******************************************************************************/ @@ -66,6 +67,13 @@ const struct luaL_Reg PhoReal::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int PhoReal::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + { + registerSchema(); + lua_pushnil(L); + return 1; + } + Icesat2Fields* _parms = NULL; try @@ -81,6 +89,60 @@ int PhoReal::luaCreate (lua_State* L) } } +/*---------------------------------------------------------------------------- + * registerSchema - register column schema without a live dataframe + *----------------------------------------------------------------------------*/ +void PhoReal::registerSchema (void) +{ + FieldColumn time_ns (Field::TIME_COLUMN); + FieldColumn latitude (Field::Y_COLUMN); + FieldColumn longitude (Field::X_COLUMN); + FieldColumn segment_id_beg; + FieldColumn x_atc; + 
FieldColumn y_atc; + FieldColumn photon_start; + FieldColumn photon_count; + FieldColumn pflags; + FieldColumn ground_photon_count; + FieldColumn vegetation_photon_count; + FieldColumn landcover; + FieldColumn snowcover; + FieldColumn solar_elevation; + FieldColumn h_te_median; + FieldColumn h_max_canopy; + FieldColumn h_min_canopy; + FieldColumn h_mean_canopy; + FieldColumn h_canopy (Field::Z_COLUMN); + FieldColumn canopy_openness; + FieldColumn> canopy_h_metrics; + + const GeoDataFrame::schema_description_t descs[] = { + {"time_ns", &time_ns, "GPS nanoseconds", NULL}, + {"latitude", &latitude, "latitude (EPSG:7912)", NULL}, + {"longitude", &longitude, "longitude (EPSG:7912)", NULL}, + {"segment_id_beg", &segment_id_beg, "first segment in extent", NULL}, + {"x_atc", &x_atc, "along-track distance (m)", NULL}, + {"y_atc", &y_atc, "across-track distance (m)", NULL}, + {"photon_start", &photon_start, "photon index of start of extent", NULL}, + {"photon_count", &photon_count, "number of photons in extent", NULL}, + {"pflags", &pflags, "processing flags", NULL}, + {"ground_photon_count", &ground_photon_count, "number of ground photons", NULL}, + {"vegetation_photon_count", &vegetation_photon_count, "number of vegetation photons", NULL}, + {"landcover", &landcover, "land cover classification", NULL}, + {"snowcover", &snowcover, "snow cover classification", NULL}, + {"solar_elevation", &solar_elevation, "solar elevation angle (deg)", NULL}, + {"h_te_median", &h_te_median, "median terrain height (m)", NULL}, + {"h_max_canopy", &h_max_canopy, "maximum canopy height (m)", NULL}, + {"h_min_canopy", &h_min_canopy, "minimum canopy height (m)", NULL}, + {"h_mean_canopy", &h_mean_canopy, "mean canopy height (m)", NULL}, + {"h_canopy", &h_canopy, "98th percentile canopy height (m)", NULL}, + {"canopy_openness", &canopy_openness, "canopy openness (std dev of relief)", NULL}, + {"canopy_h_metrics", &canopy_h_metrics, "canopy height percentile metrics", NULL}, + {NULL, NULL, NULL, 
NULL} + }; + GeoDataFrame::registerSchema("PhoReal", descs); +} + /*---------------------------------------------------------------------------- * Constructor *----------------------------------------------------------------------------*/ @@ -257,7 +319,7 @@ bool PhoReal::run (GeoDataFrame* dataframe) delete ancillary_columns; // finalize dataframe - dataframe->populateGeoColumns(); + dataframe->refreshGeoColumns(); // return success return true; diff --git a/datasets/icesat2/package/PhoReal.h b/datasets/icesat2/package/PhoReal.h index 336502888..50b64e34b 100644 --- a/datasets/icesat2/package/PhoReal.h +++ b/datasets/icesat2/package/PhoReal.h @@ -63,8 +63,9 @@ class PhoReal: public GeoDataFrame::FrameRunner * Methods *--------------------------------------------------------------------*/ - static int luaCreate (lua_State* L); - bool run (GeoDataFrame* dataframe) override; + static int luaCreate (lua_State* L); + static void registerSchema (void); + bool run (GeoDataFrame* dataframe) override; private: diff --git a/datasets/icesat2/package/SurfaceBlanket.cpp b/datasets/icesat2/package/SurfaceBlanket.cpp index 79cc5451f..6f4048489 100644 --- a/datasets/icesat2/package/SurfaceBlanket.cpp +++ b/datasets/icesat2/package/SurfaceBlanket.cpp @@ -51,6 +51,7 @@ const struct luaL_Reg SurfaceBlanket::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * METHODS ******************************************************************************/ @@ -60,6 +61,13 @@ const struct luaL_Reg SurfaceBlanket::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int SurfaceBlanket::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + { + registerSchema(); + lua_pushnil(L); + return 1; + } + Icesat2Fields* _parms = NULL; try @@ -75,6 +83,36 @@ int SurfaceBlanket::luaCreate (lua_State* L) } } +/*---------------------------------------------------------------------------- + * 
registerSchema - register column schema without a live dataframe + *----------------------------------------------------------------------------*/ +void SurfaceBlanket::registerSchema (void) +{ + FieldColumn time_ns (Field::TIME_COLUMN); + FieldColumn latitude (Field::Y_COLUMN); + FieldColumn longitude (Field::X_COLUMN); + FieldColumn segment_id_beg; + FieldColumn x_atc; + FieldColumn y_atc; + FieldColumn top_of_surface (Field::Z_COLUMN); + FieldColumn median_ground; + FieldColumn pflags; + + const GeoDataFrame::schema_description_t descs[] = { + {"time_ns", &time_ns, "GPS nanoseconds", NULL}, + {"latitude", &latitude, "latitude (EPSG:7912)", NULL}, + {"longitude", &longitude, "longitude (EPSG:7912)", NULL}, + {"segment_id_beg", &segment_id_beg, "first segment in extent", NULL}, + {"x_atc", &x_atc, "along-track distance (m)", NULL}, + {"y_atc", &y_atc, "across-track distance (m)", NULL}, + {"top_of_surface", &top_of_surface, "top of reflective surface (m)", NULL}, + {"median_ground", &median_ground, "median ground elevation (m)", NULL}, + {"pflags", &pflags, "processing flags", NULL}, + {NULL, NULL, NULL, NULL} + }; + GeoDataFrame::registerSchema("SurfaceBlanket", descs); +} + /*---------------------------------------------------------------------------- * Constructor *----------------------------------------------------------------------------*/ @@ -209,7 +247,7 @@ bool SurfaceBlanket::run (GeoDataFrame* dataframe) delete ancillary_columns; // finalize dataframe - dataframe->populateGeoColumns(); + dataframe->refreshGeoColumns(); // update runtime return true; diff --git a/datasets/icesat2/package/SurfaceBlanket.h b/datasets/icesat2/package/SurfaceBlanket.h index af368a218..5f43500f2 100644 --- a/datasets/icesat2/package/SurfaceBlanket.h +++ b/datasets/icesat2/package/SurfaceBlanket.h @@ -58,8 +58,9 @@ class SurfaceBlanket: public GeoDataFrame::FrameRunner * Methods *--------------------------------------------------------------------*/ - static int luaCreate 
(lua_State* L); - bool run (GeoDataFrame* dataframe) override; + static int luaCreate (lua_State* L); + static void registerSchema (void); + bool run (GeoDataFrame* dataframe) override; private: diff --git a/datasets/icesat2/package/SurfaceFitter.cpp b/datasets/icesat2/package/SurfaceFitter.cpp index a6e2a6b5e..fbe487cf9 100644 --- a/datasets/icesat2/package/SurfaceFitter.cpp +++ b/datasets/icesat2/package/SurfaceFitter.cpp @@ -57,6 +57,7 @@ const struct luaL_Reg SurfaceFitter::LUA_META_TABLE[] = { {NULL, NULL} }; + /****************************************************************************** * METHODS ******************************************************************************/ @@ -66,6 +67,13 @@ const struct luaL_Reg SurfaceFitter::LUA_META_TABLE[] = { *----------------------------------------------------------------------------*/ int SurfaceFitter::luaCreate (lua_State* L) { + if(lua_gettop(L) == 0) + { + registerSchema(); + lua_pushnil(L); + return 1; + } + Icesat2Fields* _parms = NULL; try @@ -81,6 +89,46 @@ int SurfaceFitter::luaCreate (lua_State* L) } } +/*---------------------------------------------------------------------------- + * registerSchema - register column schema without a live dataframe + *----------------------------------------------------------------------------*/ +void SurfaceFitter::registerSchema (void) +{ + FieldColumn time_ns (Field::TIME_COLUMN); + FieldColumn latitude (Field::Y_COLUMN); + FieldColumn longitude (Field::X_COLUMN); + FieldColumn segment_id_beg; + FieldColumn x_atc; + FieldColumn y_atc; + FieldColumn photon_start; + FieldColumn pflags; + FieldColumn h_mean (Field::Z_COLUMN); + FieldColumn dh_fit_dx; + FieldColumn window_height; + FieldColumn n_fit_photons; + FieldColumn rms_misfit; + FieldColumn h_sigma; + + const GeoDataFrame::schema_description_t descs[] = { + {"time_ns", &time_ns, "GPS nanoseconds", NULL}, + {"latitude", &latitude, "latitude (EPSG:7912)", NULL}, + {"longitude", &longitude, "longitude (EPSG:7912)", 
NULL}, + {"segment_id_beg", &segment_id_beg, "first segment in extent", NULL}, + {"x_atc", &x_atc, "along-track distance (m)", NULL}, + {"y_atc", &y_atc, "across-track distance (m)", NULL}, + {"photon_start", &photon_start, "photon index of start of extent", NULL}, + {"pflags", &pflags, "processing flags", NULL}, + {"h_mean", &h_mean, "mean height from ellipsoid (m)", NULL}, + {"dh_fit_dx", &dh_fit_dx, "along-track slope", NULL}, + {"w_surface_window_final", &window_height, "final surface window height (m)", NULL}, + {"n_fit_photons", &n_fit_photons, "number of photons in fit", NULL}, + {"rms_misfit", &rms_misfit, "root-mean-square misfit (m)", NULL}, + {"h_sigma", &h_sigma, "height uncertainty (m)", NULL}, + {NULL, NULL, NULL, NULL} + }; + GeoDataFrame::registerSchema("SurfaceFitter", descs); +} + /*---------------------------------------------------------------------------- * Constructor *----------------------------------------------------------------------------*/ @@ -240,7 +288,7 @@ bool SurfaceFitter::run (GeoDataFrame* dataframe) delete ancillary_columns; // finalize dataframe - dataframe->populateGeoColumns(); + dataframe->refreshGeoColumns(); // update runtime return true; diff --git a/datasets/icesat2/package/SurfaceFitter.h b/datasets/icesat2/package/SurfaceFitter.h index 11349cb67..bf57743e6 100644 --- a/datasets/icesat2/package/SurfaceFitter.h +++ b/datasets/icesat2/package/SurfaceFitter.h @@ -56,8 +56,9 @@ class SurfaceFitter: public GeoDataFrame::FrameRunner * Methods *--------------------------------------------------------------------*/ - static int luaCreate (lua_State* L); - bool run (GeoDataFrame* dataframe) override; + static int luaCreate (lua_State* L); + static void registerSchema (void); + bool run (GeoDataFrame* dataframe) override; private: diff --git a/packages/core/CMakeLists.txt b/packages/core/CMakeLists.txt index 79ac8a007..277619867 100644 --- a/packages/core/CMakeLists.txt +++ b/packages/core/CMakeLists.txt @@ -144,6 +144,7 @@ 
install ( install ( FILES ${CMAKE_CURRENT_LIST_DIR}/data/leap-seconds.list + ${CMAKE_CURRENT_LIST_DIR}/data/openapi-base.json ${CMAKE_CURRENT_LIST_DIR}/crsfiles/EPSG7912.projjson ${CMAKE_CURRENT_LIST_DIR}/crsfiles/EPSG7912_EGM08.projjson ${CMAKE_CURRENT_LIST_DIR}/crsfiles/EPSG9989.projjson @@ -188,6 +189,8 @@ install ( ${CMAKE_CURRENT_LIST_DIR}/endpoints/prometheus.lua ${CMAKE_CURRENT_LIST_DIR}/endpoints/time.lua ${CMAKE_CURRENT_LIST_DIR}/endpoints/version.lua + ${CMAKE_CURRENT_LIST_DIR}/endpoints/openapi.lua + ${CMAKE_CURRENT_LIST_DIR}/endpoints/schema.lua DESTINATION ${CONFDIR}/api ) diff --git a/packages/core/data/openapi-base.json b/packages/core/data/openapi-base.json new file mode 100644 index 000000000..64a18474d --- /dev/null +++ b/packages/core/data/openapi-base.json @@ -0,0 +1,421 @@ +{ + "openapi": "3.1.0", + "info": { + "title": "SlideRule Earth API", + "description": "Server-side processing of NASA ICESat-2, GEDI, and other Earth science datasets. Returns subsetted, filtered, and algorithmically processed data as Parquet files.", + "version": "0.0.0", + "contact": { + "name": "SlideRule Earth", + "url": "https://slideruleearth.io" + }, + "license": { + "name": "BSD-3-Clause", + "url": "https://github.com/SlideRuleEarth/sliderule/blob/main/LICENSE" + } + }, + "servers": [ + { + "url": "https://slideruleearth.io", + "description": "Production cluster" + } + ], + "paths": { + "/source/version": { + "get": { + "operationId": "getVersion", + "summary": "Server version", + "tags": ["Discovery"], + "responses": { + "200": { + "description": "Version information", + "content": { + "application/json": { + "schema": { + "type": "object" + } + } + } + } + } + } + }, + "/source/health": { + "get": { + "operationId": "getHealth", + "summary": "Health check", + "tags": ["Discovery"], + "responses": { + "200": { + "description": "Health status", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "healthy": {"type": "boolean"} 
+ } + } + } + } + } + } + } + }, + "/source/schema": { + "get": { + "operationId": "getSchema", + "summary": "Column schemas for current product APIs", + "description": "Returns the column schema (names, types, formats, descriptions) for the current product APIs — the x endpoints and their processing runners. Deprecated/legacy interfaces are intentionally excluded. Without the `api` parameter, returns all schemas as a map keyed by API name.", + "tags": ["Discovery"], + "parameters": [ + { + "name": "api", + "in": "query", + "required": false, + "schema": { + "type": "string" + }, + "description": "API name (e.g. Atl06DataFrame, SurfaceFitter). Omit to return the full schema map for all current APIs." + } + ], + "responses": { + "200": { + "description": "Column schema or API listing", + "content": { + "application/json": { + "schema": { + "type": "object" + } + } + } + } + } + } + }, + "/source/openapi": { + "get": { + "operationId": "getOpenAPI", + "summary": "OpenAPI specification", + "description": "Returns the complete OpenAPI 3.1 specification with column schemas injected from the live schema registry.", + "tags": ["Discovery"], + "responses": { + "200": { + "description": "OpenAPI 3.1 specification", + "content": { + "application/json": { + "schema": {"type": "object"} + } + } + } + } + } + }, + "/arrow/atl03x": { + "post": { + "operationId": "atl03x", + "summary": "ICESat-2 ATL03 photon-level data", + "description": "Returns geolocated photons from ICESat-2 ATL03 granules as an Apache Parquet file. The response schema depends on which processing stage is active: default returns Atl03DataFrame columns; with stages.fitter enabled, returns SurfaceFitter columns (fitted surface elevations); with stages.phoreal enabled, returns PhoReal columns (vegetation canopy metrics); with stages.blanket enabled, returns SurfaceBlanket columns (top-of-surface elevations). 
Also supports YAPC photon weighting (stages.yapc) and ATL08/ATL24 classification as additional columns on the default schema. Request parameters are documented at /source/defaults under the icesat2 key.", + "tags": ["ICESat-2"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. ATL03_20181019065445_03150111_006_02.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata. Schema varies by processing stage.", + "content": { + "application/octet-stream": { + "schema": { + "oneOf": [ + {"$ref": "#/components/schemas/Atl03DataFrame"}, + {"$ref": "#/components/schemas/SurfaceFitter"}, + {"$ref": "#/components/schemas/PhoReal"}, + {"$ref": "#/components/schemas/SurfaceBlanket"} + ] + } + } + } + } + } + } + }, + "/arrow/atl06x": { + "post": { + "operationId": "atl06x", + "summary": "ICESat-2 ATL06 land ice heights", + "description": "Returns land ice segment heights from ICESat-2 ATL06 granules as an Apache Parquet file. Each row is a 40m along-track segment with fitted surface elevation. Request parameters are documented at /source/defaults under the icesat2 key.", + "tags": ["ICESat-2"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. 
ATL06_20181019065445_03150111_006_02.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Atl06DataFrame"} + } + } + } + } + } + }, + "/arrow/atl08x": { + "post": { + "operationId": "atl08x", + "summary": "ICESat-2 ATL08 vegetation and canopy heights", + "description": "Returns vegetation and terrain segment data from ICESat-2 ATL08 granules as an Apache Parquet file. Each row is a 100m along-track segment with canopy height, terrain height, and land cover classification. Request parameters are documented at /source/defaults under the icesat2 key.", + "tags": ["ICESat-2"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. ATL08_20181019065445_03150111_006_02.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Atl08DataFrame"} + } + } + } + } + } + }, + "/arrow/atl13x": { + "post": { + "operationId": "atl13x", + "summary": "ICESat-2 ATL13 inland water surface heights", + "description": "Returns inland water body surface height measurements from ICESat-2 ATL13 granules as an Apache Parquet file. Each row is a water surface segment with orthometric height, water surface height, and depth. 
Request parameters are documented at /source/defaults under the icesat2 key.", + "tags": ["ICESat-2"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. ATL13_20181019065445_03150111_006_02.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Atl13DataFrame"} + } + } + } + } + } + }, + "/arrow/atl24x": { + "post": { + "operationId": "atl24x", + "summary": "ICESat-2 ATL24 bathymetry photon classification", + "description": "Returns bathymetry-classified photons from ICESat-2 ATL24 granules as an Apache Parquet file. Each row is a photon with classification (sea surface, seafloor, water column, etc.), heights, and uncertainty estimates. Request parameters are documented at /source/defaults under the icesat2 key.", + "tags": ["ICESat-2"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. 
ATL24_20181019065445_03150111_002_01.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Atl24DataFrame"} + } + } + } + } + } + }, + "/arrow/gedi01bx": { + "post": { + "operationId": "gedi01bx", + "summary": "GEDI L1B geolocated waveforms", + "description": "Returns geolocated waveform data from GEDI L1B granules as an Apache Parquet file. Each row is a shot with transmit and receive waveforms, geolocation, and quality flags. Request parameters are documented at /source/defaults under the gedi key.", + "tags": ["GEDI"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name (e.g. GEDI01_B_2019108002012_O01959_01_T03909_02_005_02_V002.h5)"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Gedi01bDataFrame"} + } + } + } + } + } + }, + "/arrow/gedi02ax": { + "post": { + "operationId": "gedi02ax", + "summary": "GEDI L2A footprint elevations", + "description": "Returns ground elevation and canopy top height from GEDI L2A granules as an Apache Parquet file. Each row is a footprint with elevation from the lowest detected mode, highest return, sensitivity, and quality flags. 
Request parameters are documented at /source/defaults under the gedi key.", + "tags": ["GEDI"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Gedi02aDataFrame"} + } + } + } + } + } + }, + "/arrow/gedi04ax": { + "post": { + "operationId": "gedi04ax", + "summary": "GEDI L4A above-ground biomass density", + "description": "Returns above-ground biomass density estimates from GEDI L4A granules as an Apache Parquet file. Each row is a footprint with biomass density (Mg/ha), elevation, sensitivity, and quality flags. Request parameters are documented at /source/defaults under the gedi key.", + "tags": ["GEDI"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Gedi04aDataFrame"} + } + } + } + } + } + }, + "/arrow/casals1bx": { + "post": { + "operationId": "casals1bx", + "summary": "CASALS L1B geolocated photons", + "description": "Returns geolocated photons from CASALS L1B granules as an Apache Parquet file. 
Each row is a photon with latitude, longitude, reference height, and timestamp. Request parameters are documented at /source/defaults under the casals key.", + "tags": ["CASALS"], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "parms": {"type": "object", "description": "Processing parameters (see /source/defaults for structure and defaults)"}, + "resource": {"type": "string", "description": "Granule name"} + }, + "required": ["parms", "resource"] + } + } + } + }, + "responses": { + "200": { + "description": "Apache Parquet file with GeoParquet metadata", + "content": { + "application/octet-stream": { + "schema": {"$ref": "#/components/schemas/Casals1bDataFrame"} + } + } + } + } + } + } + }, + "components": { + "schemas": {} + } +} diff --git a/packages/core/endpoints/openapi.lua b/packages/core/endpoints/openapi.lua new file mode 100644 index 000000000..6162d70de --- /dev/null +++ b/packages/core/endpoints/openapi.lua @@ -0,0 +1,147 @@ +-- +-- ENDPOINT: /source/openapi +-- +-- INPUT: none +-- +-- OUTPUT: Complete OpenAPI 3.1 spec as JSON with live column schemas +-- injected from the GeoDataFrame schema registry. +-- +-- NOTES: Instantiates schema-only DataFrames at load time to populate +-- the schema registry, reads the base spec template from +-- {confdir}/openapi-base.json, injects column schemas into +-- components.schemas, and returns the complete spec. 
+-- + +local json = require("json") + +--------------------------------------------------------------- +-- Instantiate schema-only DataFrames (no threads, no H5 reads) +--------------------------------------------------------------- +if __icesat2__ then + icesat2.atl03x() + icesat2.atl06x() + icesat2.atl08x() + icesat2.atl13x() + icesat2.atl24x() + -- Runner schemas (replace atl03x columns when processing stages are active) + icesat2.fit() + icesat2.phoreal() + icesat2.blanket() +end + +if __gedi__ then + gedi.gedi01bx() + gedi.gedi02ax() + gedi.gedi04ax() +end + +if __casals__ then + casals.casals1bx() +end + +--------------------------------------------------------------- +-- Build response column schemas from the live registry +--------------------------------------------------------------- +local function build_column_schemas() + local result = {} + local apis = core.schema() + + for api_name, api_desc in pairs(apis) do + local schema = core.schema(api_name) + if schema and schema.columns then + local properties = {} + local required = {} + local metadata = {} + for _, col in ipairs(schema.columns) do + local prop = {} + prop.type = col.type + if col.format and col.format ~= "" then + prop.format = col.format + end + if col.items_type then + local items = {type = col.items_type} + if col.items_format and col.items_format ~= "" then + items.format = col.items_format + end + prop.items = items + end + if col.description and col.description ~= "" then + prop.description = col.description + end + if col.condition then + prop.description = (prop.description or "") .. " (condition: " .. col.condition .. ")" + end + + -- Separate per-row columns from per-file metadata + if col.role == "element" then + prop.description = (prop.description or "") .. 
" (per-file metadata, not a row column)" + metadata[col.name] = prop + else + properties[col.name] = prop + if not col.condition then + table.insert(required, col.name) + end + end + end + + local schema_entry = { + type = "object", + description = api_desc, + properties = properties + } + if #required > 0 then + schema_entry.required = required + end + if next(metadata) then + schema_entry["x-metadata"] = metadata + end + result[api_name] = schema_entry + end + end + + return result +end + +--------------------------------------------------------------- +-- Read the base spec template +--------------------------------------------------------------- +local base_path = __confdir .. "/openapi-base.json" +local f, err = io.open(base_path, "r") +if not f then + return json.encode({error = "failed to read base spec: " .. (err or "unknown")}), false +end +local base_text = f:read("*a") +f:close() + +local ok, base_spec = pcall(json.decode, base_text) +if not ok then + return json.encode({error = "failed to parse base spec: " .. 
tostring(base_spec)}), false +end + +--------------------------------------------------------------- +-- Inject live column schemas +--------------------------------------------------------------- +if not base_spec.components then + base_spec.components = {} +end +if not base_spec.components.schemas then + base_spec.components.schemas = {} +end + +local column_schemas = build_column_schemas() +for k, v in pairs(column_schemas) do + base_spec.components.schemas[k] = v +end + +--------------------------------------------------------------- +-- Add version info +--------------------------------------------------------------- +local version, build = sys.version() +if base_spec.info then + base_spec.info.version = version +end + +--------------------------------------------------------------- +-- Return the complete spec +--------------------------------------------------------------- +return json.encode(base_spec) diff --git a/packages/core/endpoints/schema.lua b/packages/core/endpoints/schema.lua new file mode 100644 index 000000000..eed4a053f --- /dev/null +++ b/packages/core/endpoints/schema.lua @@ -0,0 +1,77 @@ +-- +-- ENDPOINT: /source/schema +-- +-- INPUT: query string: api= +-- e.g. /source/schema?api=Atl03DataFrame +-- If no api parameter, returns all schemas as a map keyed by API name. +-- +-- OUTPUT: JSON object with column schemas for the current product APIs +-- (the "x" endpoints: atl03x, atl06x, atl08x, atl13x, atl24x, +-- gedi01bx, gedi02ax, gedi04ax, casals1bx) and their associated +-- processing runners (SurfaceFitter, PhoReal, SurfaceBlanket). +-- Deprecated/legacy interfaces are intentionally excluded. +-- +local json = require("json") + +-- Instantiate schema-only DataFrames to populate the schema registry. +-- Called with no arguments, these create lightweight objects that register +-- column names and descriptions without starting threads or opening files. 
+if __icesat2__ then + icesat2.atl03x() + icesat2.atl06x() + icesat2.atl08x() + icesat2.atl13x() + icesat2.atl24x() + -- Runner schemas (replace atl03x columns when processing stages are active) + icesat2.fit() + icesat2.phoreal() + icesat2.blanket() +end + +if __gedi__ then + gedi.gedi01bx() + gedi.gedi02ax() + gedi.gedi04ax() +end + +if __casals__ then + casals.casals1bx() +end + +-- Pre-compute all schemas at load time +local all_schemas = {} +local apis = core.schema() +for api_name, _ in pairs(apis) do + all_schemas[api_name] = core.schema(api_name) +end +local cached_all = json.encode(all_schemas) + +-- Pre-compute per-api responses +local cached_per_api = {} +for api_name, schema in pairs(all_schemas) do + cached_per_api[api_name] = json.encode(schema) +end + +-- Handle request +local function parse_query(qs) + local params = {} + if qs then + for k, v in qs:gmatch("([^&=]+)=([^&=]+)") do + params[k] = v + end + end + return params +end + +local params = parse_query(_rqst and _rqst.arg or nil) +local api = params["api"] + +if not api then + return cached_all +end + +if cached_per_api[api] then + return cached_per_api[api] +end + +return json.encode({error = "unknown api: " .. 
api}) diff --git a/packages/core/package/FieldDictionary.h b/packages/core/package/FieldDictionary.h index 40b77e33c..e67490a17 100644 --- a/packages/core/package/FieldDictionary.h +++ b/packages/core/package/FieldDictionary.h @@ -62,6 +62,8 @@ class FieldDictionary: public Field typedef struct { const char* name; Field* field; + const char* description = nullptr; + const char* condition = nullptr; } init_entry_t; typedef struct { diff --git a/packages/core/package/FieldMap.h b/packages/core/package/FieldMap.h index 41d0ee814..b65df0cff 100644 --- a/packages/core/package/FieldMap.h +++ b/packages/core/package/FieldMap.h @@ -58,6 +58,8 @@ typedef struct { const char* name; T* field; + const char* description = nullptr; + const char* condition = nullptr; } init_entry_t; typedef struct { diff --git a/packages/core/package/GeoDataFrame.cpp b/packages/core/package/GeoDataFrame.cpp index 0e30eb75d..be2ca517c 100644 --- a/packages/core/package/GeoDataFrame.cpp +++ b/packages/core/package/GeoDataFrame.cpp @@ -62,6 +62,9 @@ const char* GeoDataFrame::SOURCE_ID = "srcid"; const char* GeoDataFrame::SOURCE_TABLE = "srctbl"; const char* GeoDataFrame::SOURCE_DATA = "srcdata"; +Mutex GeoDataFrame::schemaMut; +Dictionary GeoDataFrame::schemaRegistry; + const char* GeoDataFrame::LUA_META_NAME = "GeoDataFrame"; const struct luaL_Reg GeoDataFrame::LUA_META_TABLE[] = { {NULL, NULL} @@ -707,9 +710,21 @@ vector GeoDataFrame::getColumnNames(void) const /*---------------------------------------------------------------------------- * addColumn - assumes memory is properly allocated already + * + * When description is provided, stashes it in pendingDescs for schema + * registration. When enabled is false, the column is not added to the + * dataframe but the description is still registered (for documenting + * conditional columns in the schema). 
*----------------------------------------------------------------------------*/ -bool GeoDataFrame::addColumn (const char* name, FieldUntypedColumn* column, bool free_on_delete) +bool GeoDataFrame::addColumn (const char* name, FieldUntypedColumn* column, bool free_on_delete, + const char* description, const char* condition, bool enabled) { + if(description) + pendingDescs.push_back({name, column, description, condition}); + + if(!enabled) + return true; + return columnFields.add(name, column, free_on_delete); } @@ -874,11 +889,65 @@ bool GeoDataFrame::deleteColumn (const char* name) } /*---------------------------------------------------------------------------- - * populateGeoColumn + * populateGeoColumns + * + * Called from subclass constructors after derived members are initialized. + * Uses pendingDescs (stashed during base construction) to register the + * schema. Field pointers are now safe to dereference since derived members + * are fully constructed by this point. *----------------------------------------------------------------------------*/ void GeoDataFrame::populateGeoColumns (void) { - // populate geo columns + discoverGeoColumns(); + + schemaMut.lock(); + { + if(!schemaRegistry.find(LuaMetaName)) + { + Schema* schema = new Schema; + schema->name = LuaMetaName; + schema->description = LuaMetaName; + + for(const auto& desc : pendingDescs) + { + SchemaField sf; + sf.name = desc.name; + encoding2openapi(desc.field->encoding, sf); + sf.description = desc.description ? desc.description : ""; + sf.condition = desc.condition ? desc.condition : ""; + sf.role = (desc.field->type == Field::COLUMN) ? "column" : "element"; + schema->fields.push_back(sf); + } + + schemaRegistry.add(LuaMetaName, schema); + } + } + schemaMut.unlock(); + + pendingDescs.clear(); +} + +/*---------------------------------------------------------------------------- + * refreshGeoColumns + * + * Called by FrameRunners after replacing columns on a dataframe. 
+ * Re-discovers which columns carry time/x/y/z roles. + * Schema is NOT registered here — runners register once at startup. + *----------------------------------------------------------------------------*/ +void GeoDataFrame::refreshGeoColumns (void) +{ + timeColumn = NULL; + xColumn = NULL; + yColumn = NULL; + zColumn = NULL; + discoverGeoColumns(); +} + +/*---------------------------------------------------------------------------- + * discoverGeoColumns + *----------------------------------------------------------------------------*/ +void GeoDataFrame::discoverGeoColumns (void) +{ Dictionary::entry_t>::Iterator iter(columnFields.fields); for(int f = 0; f < iter.length; f++) { @@ -919,6 +988,44 @@ void GeoDataFrame::populateGeoColumns (void) } } +/*---------------------------------------------------------------------------- + * registerSchema + * + * Single-pass: iterates the description array directly. Each entry carries + * a Field* pointer to the live member, so encoding is read from the field + * itself — no duplication. Role (column vs element) is derived from + * field->type. + *----------------------------------------------------------------------------*/ +void GeoDataFrame::registerSchema (const char* schema_name, const schema_description_t* descs) +{ + schemaMut.lock(); + { + if(!schemaRegistry.find(schema_name)) + { + Schema* schema = new Schema; + schema->name = schema_name; + schema->description = schema_name; + + if(descs) + { + for(int i = 0; descs[i].name != NULL; i++) + { + SchemaField sf; + sf.name = descs[i].name; + encoding2openapi(descs[i].field->encoding, sf); + sf.description = descs[i].description ? descs[i].description : ""; + sf.condition = descs[i].condition ? descs[i].condition : ""; + sf.role = (descs[i].field->type == Field::COLUMN) ? 
"column" : "element"; + schema->fields.push_back(sf); + } + } + + schemaRegistry.add(schema_name, schema); + } + } + schemaMut.unlock(); +} + /*---------------------------------------------------------------------------- * operator[] *----------------------------------------------------------------------------*/ @@ -1219,6 +1326,150 @@ void GeoDataFrame::addAncillaryColumns (Dictionary* ancillary_colum } } +/*---------------------------------------------------------------------------- + * encoding2openapi + *----------------------------------------------------------------------------*/ +void GeoDataFrame::encoding2openapi (uint32_t encoding, SchemaField& sf) +{ + const uint32_t base_type = encoding & Field::TYPE_MASK; + const bool nested = (encoding & Field::NESTED_MASK) != 0; + + string base_openapi_type; + string base_openapi_format; + + switch(base_type) + { + case RecordObject::BOOL: base_openapi_type = "boolean"; break; + case RecordObject::INT8: base_openapi_type = "integer"; base_openapi_format = "int8"; break; + case RecordObject::INT16: base_openapi_type = "integer"; base_openapi_format = "int16"; break; + case RecordObject::INT32: base_openapi_type = "integer"; base_openapi_format = "int32"; break; + case RecordObject::INT64: base_openapi_type = "integer"; base_openapi_format = "int64"; break; + case RecordObject::UINT8: base_openapi_type = "integer"; base_openapi_format = "uint8"; break; + case RecordObject::UINT16: base_openapi_type = "integer"; base_openapi_format = "uint16"; break; + case RecordObject::UINT32: base_openapi_type = "integer"; base_openapi_format = "uint32"; break; + case RecordObject::UINT64: base_openapi_type = "integer"; base_openapi_format = "uint64"; break; + case RecordObject::FLOAT: base_openapi_type = "number"; base_openapi_format = "float"; break; + case RecordObject::DOUBLE: base_openapi_type = "number"; base_openapi_format = "double"; break; + case RecordObject::TIME8: base_openapi_type = "string"; base_openapi_format = 
"timestamp-ns"; break; + case RecordObject::STRING: base_openapi_type = "string"; break; + default: base_openapi_type = "string"; break; + } + + if(nested) + { + sf.type = "array"; + sf.items_type = base_openapi_type; + sf.items_format = base_openapi_format; + } + else + { + sf.type = base_openapi_type; + sf.format = base_openapi_format; + } +} + +/*---------------------------------------------------------------------------- + * luaSchema - core.schema([api_name]) + * + * 0 args: returns {meta_name = description, ...} + * 1 arg: returns {description = "...", columns = [{name, type, format, items_type, items_format, description, condition, role}, ...]} + *----------------------------------------------------------------------------*/ +int GeoDataFrame::luaSchema (lua_State* L) +{ + const int num_args = lua_gettop(L); + + if(num_args == 0) + { + // return listing of all registered schemas + lua_newtable(L); + schemaMut.lock(); + { + Dictionary::Iterator iter(schemaRegistry); + for(int i = 0; i < iter.length; i++) + { + lua_pushstring(L, iter[i].value->description.c_str()); + lua_setfield(L, -2, iter[i].key); + } + } + schemaMut.unlock(); + return 1; + } + + // return schema for a specific API + const char* api_name = lua_tostring(L, 1); + if(api_name == NULL) + { + lua_pushnil(L); + return 1; + } + + schemaMut.lock(); + Schema* schema = NULL; + const bool found = schemaRegistry.find(api_name, &schema); + schemaMut.unlock(); + + if(!found || schema == NULL) + { + lua_pushnil(L); + return 1; + } + + // build result table + lua_newtable(L); + + lua_pushstring(L, schema->description.c_str()); + lua_setfield(L, -2, "description"); + + // columns array + lua_newtable(L); + for(size_t i = 0; i < schema->fields.size(); i++) + { + const SchemaField& f = schema->fields[i]; + lua_newtable(L); + + lua_pushstring(L, f.name.c_str()); + lua_setfield(L, -2, "name"); + + lua_pushstring(L, f.type.c_str()); + lua_setfield(L, -2, "type"); + + if(!f.format.empty()) + { + lua_pushstring(L, 
f.format.c_str()); + lua_setfield(L, -2, "format"); + } + + if(!f.items_type.empty()) + { + lua_pushstring(L, f.items_type.c_str()); + lua_setfield(L, -2, "items_type"); + } + + if(!f.items_format.empty()) + { + lua_pushstring(L, f.items_format.c_str()); + lua_setfield(L, -2, "items_format"); + } + + lua_pushstring(L, f.description.c_str()); + lua_setfield(L, -2, "description"); + + lua_pushstring(L, f.role.c_str()); + lua_setfield(L, -2, "role"); + + if(!f.condition.empty()) + { + lua_pushstring(L, f.condition.c_str()); + lua_setfield(L, -2, "condition"); + } + + lua_rawseti(L, -2, static_cast(i + 1)); + } + lua_setfield(L, -2, "columns"); + + return 1; +} + /*---------------------------------------------------------------------------- * Constructor *----------------------------------------------------------------------------*/ @@ -1261,6 +1512,20 @@ GeoDataFrame::GeoDataFrame( lua_State* L, LuaEngine::setAttrFunc(L, "run", luaRun); LuaEngine::setAttrFunc(L, "finished", luaRunComplete); + // stash descriptions from init lists (field pointers are stored + // but NOT dereferenced — derived members initialize after base + // construction; pointers are only read in populateGeoColumns()) + for(const auto& elem : column_list) + { + if(elem.description) + pendingDescs.push_back({elem.name, elem.field, elem.description, elem.condition}); + } + for(const auto& elem : meta_list) + { + if(elem.description) + pendingDescs.push_back({elem.name, elem.field, elem.description, elem.condition}); + } + // start runner runPid = new Thread(runThread, this); } diff --git a/packages/core/package/GeoDataFrame.h b/packages/core/package/GeoDataFrame.h index 31ffc0f90..7853345ad 100644 --- a/packages/core/package/GeoDataFrame.h +++ b/packages/core/package/GeoDataFrame.h @@ -121,6 +121,32 @@ class GeoDataFrame: public LuaObject, public Field GeoDataFrame::column_op_t op; } ancillary_t; + struct SchemaField { + string name; + string type; // OpenAPI type: "number", "integer", "string", 
"boolean", "array" + string format; // OpenAPI format: "double", "float", "int32", "timestamp-ns", etc. (empty for arrays) + string items_type; // for arrays: element OpenAPI type (e.g. "number") + string items_format;// for arrays: element OpenAPI format (e.g. "float") + string description; + string role; // "column" or "element" + string condition; // empty = always present; non-empty = request parameter condition + }; + + struct Schema { + string name; + string description; + vector<SchemaField> fields; + }; + + // Schema descriptions for runners (PhoReal, SurfaceFitter, SurfaceBlanket) + // which build columns at runtime outside the constructor init list. + typedef struct { + const char* name; + const Field* field; // pointer to live member — encoding read from here + const char* description; + const char* condition; // NULL = always present; non-NULL = request parameter condition + } schema_description_t; + /*-------------------------------------------------------------------- * Subclasses *--------------------------------------------------------------------*/ @@ -173,6 +199,8 @@ class GeoDataFrame: public LuaObject, public Field static void init (void); static int luaCreate (lua_State* L); + static int luaSchema (lua_State* L); + static void registerSchema (const char* schema_name, const schema_description_t* descs); void clear (void) override; long length (void) const override; @@ -181,7 +209,8 @@ class GeoDataFrame: public LuaObject, public Field void setNumRows (long rows); long appendFromBuffer (const char* name, const uint8_t* buffer, long size, uint32_t column_encoding=0, bool nodata=false); vector<string> getColumnNames (void) const; - bool addColumn (const char* name, FieldUntypedColumn* column, bool free_on_delete); + bool addColumn (const char* name, FieldUntypedColumn* column, bool free_on_delete, + const char* description=nullptr, const char* condition=nullptr, bool enabled=true); bool addNewColumn (const char* name, uint32_t column_encoding); bool addExistingColumn
(const char* name, FieldUntypedColumn* column); FieldUntypedColumn* getColumn (const char* name, bool no_throw=false) const; @@ -189,6 +218,7 @@ class GeoDataFrame: public LuaObject, public Field Field* getMetaData (const char* name, Field::type_t _type=Field::FIELD, bool no_throw=false) const; bool deleteColumn (const char* name); void populateGeoColumns (void); + void refreshGeoColumns (void); const FieldUntypedColumn& operator[] (const char* key) const; FieldUntypedColumn* getUnsafe (const char* key) const; @@ -291,10 +321,35 @@ class GeoDataFrame: public LuaObject, public Field static int luaRun (lua_State* L); static int luaRunComplete (lua_State* L); + static void encoding2openapi (uint32_t encoding, SchemaField& sf); + void discoverGeoColumns (void); + + /*-------------------------------------------------------------------- + * Typedefs + *--------------------------------------------------------------------*/ + + // Stashed descriptions extracted from init lists during base + // construction. Consumed by populateGeoColumns() once derived + // members are initialized, then cleared. + // + // The field pointer is stored but NOT dereferenced until + // populateGeoColumns() runs in the subclass constructor body, + // by which point all derived members are fully initialized. 
+ struct stashed_desc_t { + const char* name; + const Field* field; + const char* description; + const char* condition; + }; + /*-------------------------------------------------------------------- * Data *--------------------------------------------------------------------*/ + static Mutex schemaMut; + static Dictionary<Schema*> schemaRegistry; + + vector<stashed_desc_t> pendingDescs; bool inError; long numRows; FieldMap<FieldUntypedColumn> columnFields; diff --git a/packages/core/package/core.cpp index e5d5a9ad7..26e6bda3a 100644 --- a/packages/core/package/core.cpp +++ b/packages/core/package/core.cpp @@ -145,6 +145,7 @@ static int core_open (lua_State *L) {"ams", AmsLib::luaRequest}, {"parms", RequestFields::luaCreate}, {"send2user", OutputLib::luaSend2User}, + {"schema", GeoDataFrame::luaSchema}, #ifdef __unittesting__ {"ut_dictionary", UT_Dictionary::luaCreate}, {"ut_field", UT_Field::luaCreate}, diff --git a/scripts/test_schema.sh new file mode 100755 index 000000000..5af68bae7 --- /dev/null +++ b/scripts/test_schema.sh @@ -0,0 +1,76 @@ +#!/usr/bin/env bash +# +# Regenerate tmp_schema_test/ by starting a local sliderule server in Docker +# and fetching all schema endpoints. +# +# Usage: ./scripts/test_schema.sh [output_dir] +# +# Requires: build artifacts in stage/sliderule/ (run the Docker build first). +# +set -euo pipefail + +REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" +OUT_DIR="${1:-$REPO_ROOT/tmp_schema_test}" +CONTAINER_NAME="sliderule-schema-test" +PORT=9081 +BASE_URL="http://localhost:$PORT" +IMAGE="742127912612.dkr.ecr.us-west-2.amazonaws.com/sliderule-buildenv:latest" + +# Clean up container on exit +cleanup() { docker stop "$CONTAINER_NAME" >/dev/null 2>&1 || true; } +trap cleanup EXIT + +# Start the server +echo "Starting sliderule server..."
+docker run -d --rm --name "$CONTAINER_NAME" \ + -v "$REPO_ROOT":"$REPO_ROOT" \ + -e LOG_FORMAT=FMT_TEXT \ + -e ENVIRONMENT_VERSION=dirty \ + -e IPV4=127.0.0.1 \ + -p "$PORT:$PORT" \ + "$IMAGE" \ + "$REPO_ROOT/stage/sliderule/bin/sliderule" \ + "$REPO_ROOT/targets/slideruleearth/server-local.lua" >/dev/null + +# Wait for server to be ready +echo -n "Waiting for server" +for i in $(seq 1 30); do + if curl -sf "$BASE_URL/source/health" -o /dev/null 2>/dev/null; then + echo " ready" + break + fi + echo -n "." + sleep 1 +done + +mkdir -p "$OUT_DIR" + +# Helper: fetch JSON and pretty-print to file +fetch() { + curl -sf "$1" | python3 -m json.tool > "$2" +} + +# Health check +fetch "$BASE_URL/source/health" "$OUT_DIR/health.json" +echo "health OK" + +# All schemas +fetch "$BASE_URL/source/schema" "$OUT_DIR/schema_all.json" +echo "schema_all OK" + +# Per-API schemas +APIS=$(python3 -c "import json,sys; print('\n'.join(json.load(sys.stdin).keys()))" < "$OUT_DIR/schema_all.json") +for api in $APIS; do + fetch "$BASE_URL/source/schema?api=$api" "$OUT_DIR/schema_${api}.json" + echo "schema_${api} OK" +done + +# Error case +fetch "$BASE_URL/source/schema?api=NoSuchApi" "$OUT_DIR/schema_error.json" +echo "schema_error OK" + +# OpenAPI spec +fetch "$BASE_URL/source/openapi" "$OUT_DIR/openapi.json" +echo "openapi OK" + +echo "Done. 
Output in $OUT_DIR/" diff --git a/targets/slideruleearth/server-local.lua b/targets/slideruleearth/server-local.lua new file mode 100644 index 000000000..d44f565da --- /dev/null +++ b/targets/slideruleearth/server-local.lua @@ -0,0 +1,12 @@ +-------------------------------------------------- +-- Minimal Local Server (no AWS dependencies) +-------------------------------------------------- + +-- Configure Application Endpoints -- +local source_endpoint = core.endpoint():global("SourceEndpoint") +local arrow_endpoint = arrow.endpoint():global("ArrowEndpoint") + +-- Run Application HTTP Server -- +local app_server = core.httpd(9081):global("AppServer") +app_server:attach(source_endpoint, "/source") +app_server:attach(arrow_endpoint, "/arrow")