diff --git a/.github/workflows/dev-forestgeo-livesite.yml b/.github/workflows/dev-forestgeo-livesite.yml new file mode 100644 index 00000000..3bfbc59b --- /dev/null +++ b/.github/workflows/dev-forestgeo-livesite.yml @@ -0,0 +1,106 @@ +# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy +# More GitHub Actions for Azure: https://github.com/Azure/actions + +name: ForestGEO Development Live Site Deployment Pipeline + +on: + push: + branches: + - 'forestgeo-app-*' + workflow_dispatch: + +jobs: + build-app-development: + if: startsWith(github.ref, 'refs/heads/forestgeo-app-') + runs-on: ubuntu-latest + environment: development + + steps: + - uses: actions/checkout@v4 + + - name: Set up Node.js version + uses: actions/setup-node@v3 + with: + node-version: '18.x' + + - name: create env file (in frontend/ directory) -- development + id: create-env-file-dev + run: | + touch frontend/.env + echo AZURE_AD_CLIENT_SECRET=${{ secrets.AZURE_AD_CLIENT_SECRET }} >> frontend/.env + echo AZURE_AD_CLIENT_ID=${{ secrets.AZURE_AD_CLIENT_ID }} >> frontend/.env + echo AZURE_AD_TENANT_ID=${{ secrets.AZURE_AD_TENANT_ID }} >> frontend/.env + echo NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }} >> frontend/.env + echo NEXTAUTH_URL=${{ secrets.NEXTAUTH_URL_DEV }} >> frontend/.env + echo AZURE_SQL_USER=${{ secrets.AZURE_SQL_USER }} >> frontend/.env + echo AZURE_SQL_PASSWORD=${{ secrets.AZURE_SQL_PASSWORD }} >> frontend/.env + echo AZURE_SQL_SERVER=${{ secrets.AZURE_SQL_SERVER }} >> frontend/.env + echo AZURE_SQL_DATABASE=${{ secrets.AZURE_SQL_DATABASE }} >> frontend/.env + echo AZURE_SQL_PORT=${{ secrets.AZURE_SQL_PORT }} >> frontend/.env + echo AZURE_STORAGE_SAS_CONNECTION_STRING=${{ secrets.AZURE_STORAGE_SAS_CONNECTION_STRING }} >> frontend/.env + echo AZURE_SQL_SCHEMA=${{ secrets.AZURE_SQL_SCHEMA }} >> frontend/.env + echo AZURE_SQL_CATALOG_SCHEMA=${{ secrets.AZURE_SQL_CATALOG_SCHEMA }} >> frontend/.env + echo AZURE_STORAGE_CONNECTION_STRING=${{ secrets.AZURE_STORAGE_CONNECTION_STRING }} >> frontend/.env + echo NEXTAUTH_DEBUG=true >> frontend/.env + echo NODE_ENV=development >> frontend/.env + echo PORT=3000 >> frontend/.env + echo FG_PAT=${{ secrets.FG_PAT }} >> frontend/.env + echo OWNER=${{ secrets.OWNER }} >> frontend/.env + echo REPO=${{ secrets.REPO }} >> frontend/.env + + - name: Cache node modules + uses: actions/cache@v2 + with: + path: frontend/node_modules + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Cache Next.js build + uses: actions/cache@v2 + with: + path: frontend/build/cache + key: ${{ runner.os }}-next-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**/.next/cache') }} + restore-keys: | + ${{ runner.os }}-next- + ${{ runner.os }}-next-${{ hashFiles('**/package-lock.json') }} + + - name: move into frontend --> npm install, build, and test + run: | + cd frontend/ + npm install + npm run build + npm run test --if-present + + - name: Move directories into build/standalone to reduce app load + run: | + mv ./frontend/build/static ./frontend/build/standalone/build + mv ./frontend/public ./frontend/build/standalone + + - name: Upload build artifact + uses: actions/upload-artifact@v4 + with: + name: app-build + path: frontend/build/standalone + + deploy-app-development: + needs: build-app-development + runs-on: ubuntu-latest + environment: development + + steps: + - name: Download build artifact + uses: actions/download-artifact@v4 + with: + name: app-build + path: frontend/build/standalone 
+ + - name: 'Deploy to Azure Web App (development)' + id: deploy-to-webapp-dev + if: startsWith(github.ref, 'refs/heads/forestgeo-app-') + uses: azure/webapps-deploy@v2 + with: + app-name: 'forestgeo-development' + slot-name: 'Production' + publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_DEVELOPMENT }} + package: frontend/build/standalone \ No newline at end of file diff --git a/.github/workflows/main-forestgeo-livesite.yml b/.github/workflows/main-forestgeo-livesite.yml index 3bc0700a..86c5e5e2 100644 --- a/.github/workflows/main-forestgeo-livesite.yml +++ b/.github/workflows/main-forestgeo-livesite.yml @@ -10,9 +10,10 @@ on: workflow_dispatch: jobs: - build-and-deploy: + build-app-production: + if: github.ref == 'refs/heads/main' runs-on: ubuntu-latest - environment: development + environment: production steps: - uses: actions/checkout@v4 @@ -22,14 +23,14 @@ jobs: with: node-version: '18.x' - - name: create env file (in frontend/ directory) + - name: create env file (in frontend/ directory) -- production + id: create-env-file-prod run: | touch frontend/.env - echo AZURE_AD_CLIENT_SECRET=${{ secrets.AZURE_AD_CLIENT_SECRET }} >> frontend/.env - echo AZURE_AD_CLIENT_ID=${{ secrets.AZURE_AD_CLIENT_ID }} >> frontend/.env - echo AZURE_AD_TENANT_ID=${{ secrets.AZURE_AD_TENANT_ID }} >> frontend/.env + echo AZURE_AD_CLIENT_SECRET=${{ secrets.AZURE_AD_CLIENT_SECRET_PRODUCTION }} >> frontend/.env + echo AZURE_AD_CLIENT_ID=${{ secrets.AZURE_AD_CLIENT_ID_PRODUCTION }} >> frontend/.env + echo AZURE_AD_TENANT_ID=${{ secrets.AZURE_AD_TENANT_ID_PRODUCTION }} >> frontend/.env echo NEXTAUTH_SECRET=${{ secrets.NEXTAUTH_SECRET }} >> frontend/.env - echo NEXTAUTH_URL=${{ secrets.NEXTAUTH_URL }} >> frontend/.env echo AZURE_SQL_USER=${{ secrets.AZURE_SQL_USER }} >> frontend/.env echo AZURE_SQL_PASSWORD=${{ secrets.AZURE_SQL_PASSWORD }} >> frontend/.env echo AZURE_SQL_SERVER=${{ secrets.AZURE_SQL_SERVER }} >> frontend/.env @@ -39,12 +40,12 @@ jobs: echo AZURE_SQL_SCHEMA=${{ secrets.AZURE_SQL_SCHEMA }} >> frontend/.env echo AZURE_SQL_CATALOG_SCHEMA=${{ secrets.AZURE_SQL_CATALOG_SCHEMA }} >> frontend/.env echo AZURE_STORAGE_CONNECTION_STRING=${{ secrets.AZURE_STORAGE_CONNECTION_STRING }} >> frontend/.env - echo NODE_ENV=development >> frontend/.env + echo NEXTAUTH_DEBUG=true >> frontend/.env + echo NODE_ENV=production >> frontend/.env echo PORT=3000 >> frontend/.env - - - name: Write Certificate to File - run: | - echo "${{ secrets.CERTIFICATE }}" > frontend/DigiCertGlobalRootCA.crt.pem + echo FG_PAT=${{ secrets.FG_PAT }} >> frontend/.env + echo OWNER=${{ secrets.OWNER }} >> frontend/.env + echo REPO=${{ secrets.REPO }} >> frontend/.env - name: Cache node modules uses: actions/cache@v2 @@ -54,6 +55,15 @@ jobs: restore-keys: | ${{ runner.os }}-node- + - name: Cache Next.js build + uses: actions/cache@v2 + with: + path: frontend/.next/cache + key: ${{ runner.os }}-next-${{ hashFiles('**/package-lock.json') }}-${{ hashFiles('**/.next/cache') }} + restore-keys: | + ${{ runner.os }}-next- + ${{ runner.os }}-next-${{ hashFiles('**/package-lock.json') }} + - name: move into frontend --> npm install, build, and test run: | cd frontend/ @@ -65,13 +75,31 @@ jobs: run: | mv ./frontend/build/static ./frontend/build/standalone/build mv ./frontend/public ./frontend/build/standalone - mv ./frontend/*.pem ./frontend/build/standalone/ - - name: 'Deploy to Azure Web App' - id: deploy-to-webapp + - name: Upload build artifact + uses: actions/upload-artifact@v4 + with: + name: app-build + path: frontend/build/standalone + 
+ deploy-app-production: + needs: build-app-production + runs-on: ubuntu-latest + environment: production + + steps: + - name: Download build artifact + uses: actions/download-artifact@v4 + with: + name: app-build + path: frontend/build/standalone + + - name: 'Deploy to Azure Web App (main)' + id: deploy-to-webapp-main + if: github.ref == 'refs/heads/main' uses: azure/webapps-deploy@v2 with: app-name: 'forestgeo-livesite' slot-name: 'Production' - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_852346BD764D45D08854E6679137F844 }} - package: ./frontend/build/standalone + publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_PRODUCTION }} + package: frontend/build/standalone \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9e20a2f9..e631b111 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ # dependencies /node_modules +/liquibase /.pnp .pnp.js /.idea/* @@ -38,10 +39,14 @@ yarn-error.log* next-env.d.ts /.idea /.vscode +/.idea +/.vscode /*.zip .github/workflows/new-file-upload-system_forestgeo-livesite.yml /.fleet /frontend/sampledata /frontend/scripts -/frontend/sqlscripting /frontend/.vscode +/documentation/.obsidian +/documentation/.obsidian/* +frontend/liquibase/ \ No newline at end of file diff --git a/frontend/.eslintrc.cjs b/frontend/.eslintrc.cjs new file mode 100644 index 00000000..60a5cd40 --- /dev/null +++ b/frontend/.eslintrc.cjs @@ -0,0 +1,22 @@ +module.exports = { + parser: '@typescript-eslint/parser', + extends: ['next', 'plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'], + settings: { + next: { + rootDir: '.' + } + }, + plugins: ['@typescript-eslint', 'unused-imports', 'prettier', 'import'], + rules: { + 'react-hooks/exhaustive-deps': 'off', + semi: ['error', 'always'], + 'unused-imports/no-unused-imports': 'error', + '@typescript-eslint/no-explicit-any': 'off', + '@typescript-eslint/no-unused-vars': 'off', + '@typescript-eslint/no-var-requires': 'off', + 'unused-imports/no-unused-vars': 'off', + 'react-hooks/rules-of-hooks': 'off', + 'no-case-declarations': 'off', + 'prettier/prettier': 'error' + } +}; diff --git a/frontend/.eslintrc.js b/frontend/.eslintrc.js deleted file mode 100644 index 37fb4284..00000000 --- a/frontend/.eslintrc.js +++ /dev/null @@ -1,18 +0,0 @@ -module.exports = { - parser: "@typescript-eslint/parser", - extends: ["next", "plugin:@typescript-eslint/recommended"], - settings: { - next: { - rootDir: "." 
- } - }, - plugins: ["@typescript-eslint", "unused-imports"], - rules: { - "react-hooks/exhaustive-deps": "off", - "semi": ["error", "always"], - "unused-imports/no-unused-imports": "error", - "@typescript-eslint/no-explicit-any": "off", - "@typescript-eslint/no-unused-vars": "off", - "unused-imports/no-unused-vars": "off" - } -}; \ No newline at end of file diff --git a/frontend/.gitignore b/frontend/.gitignore index 0068adca..d320e8b3 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -17,7 +17,6 @@ # production /build /sampledata -/sqlscripting /scripts DigiCertGlobalRootCA.crt.pem @@ -49,3 +48,16 @@ yarn-error.log* /playwright-report/ /blob-report/ /playwright/.cache/ +/.fleet +/.vscode +/.idea +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ + +*storybook.log +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ diff --git a/frontend/.prettierrc.json b/frontend/.prettierrc.json new file mode 100644 index 00000000..af6398a9 --- /dev/null +++ b/frontend/.prettierrc.json @@ -0,0 +1,13 @@ +{ + "semi": true, + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true, + "jsxBracketSameLine": false, + "arrowParens": "avoid", + "proseWrap": "preserve", + "endOfLine": "lf", + "printWidth": 160, + "tabWidth": 2, + "useTabs": false +} diff --git a/frontend/CHANGELOG.md b/frontend/CHANGELOG.md new file mode 100644 index 00000000..63f4382e --- /dev/null +++ b/frontend/CHANGELOG.md @@ -0,0 +1,326 @@ +# App Changelog History + +## Acacia Update (Completion: 9/15/2024) + +###### Note: all project files received updated formatting and linting. Files that received ONLY these changes have been omitted for brevity. + +### Workflow + +1. Dedicated dev branch workflow created +2. Main branch workflow isolated to focus only on main updates + +--- + +### Documentation + +1. Writerside-powered documentation application created and added to Github Pages (accessible + at [this page](https://smithsonian.github.io/ForestGEO/)) + +--- + +### Formatting and Configuration + +1. ESLint properly integrated +2. Prettier installed and integrated + +--- + +### Testing + +1. Formatting reapplied to test files +2. Skips applied to incomplete test cases (will ensure that broken tests do not crash builds) +3. New rollover modal test file created - only framework, automatically skipped and pending full build-out + +--- + +### Webpages + +1. NextJS-powered default error pages applied to all existing webpages. This will in turn ensure that potential + breakages don't immediately cause cascading issues or full app shutdown. + +#### Dashboard + +1. Full overhaul and reorganization of components +2. Baseline user documentation provided to user +3. Animation tool to highlight Github Feedback Modal added +4. User information display (will show name, email, role, and available sites) +5. Recent changes menu implemented -- Skeleton-powered display of 5 most recent changes to site schema. Only displayed + after site, plot, and census is applied to reduce number of queries being made to schema. + +#### Hub Layout file + +1. Reorganization and debugging core issues around site/plot/census selection +2. Debounce implemented -- severe performance issues were being caused by constant reloading of site/plot/census + selection +3. Site/plot/census re-selection system was replaced fully using React useRef hooks. This ensures that dispatches are + externally monitored and only fired when necessary +4. Previously, site/plot/census were being reloaded simultaneously. 
This in turn caused synchronization issues when + attempting to re-select site/plot/census. System replaced with staggered loading system and sequential list loading + instead +5. Acacia version text removed -- no core versions are needed for anything other than historical app status tracking. +6. Github Feedback modal integrated into all page footers -- this will allow users to provide feedback at any point in + the site + +#### View Data page + +1. Previous page implementation was moved to dedicated component file. + +#### Validations page + +1. User-facing display implemented to display existing stored validations +2. ValidationCard component created to show validation name, description, and SQL implementation +3. ValidationCard toggle implemented to allow enabling/disabling of each validation +4. Page interaction has been restricted to only allow users with administrator/db-admin status to make modifications, + etc. + +#### View Full Table page + +1. Dedicated page to show viewfulltable materialized view information created. + +#### Sidebar + +1. now uses Select components to handle site/plot/census selection instead of previous modal system. +2. navigation menu has been updated to reflect new navigation menu and endpoint names have been updated to be clearer + +--- + +### API changes + +#### frontend/app/api/auth/[[...nextauth]]/route.ts + +1. Dedicated type UserStatusRoles created to centralize allowed set of user roles +2. IsAdmin state deprecated and removed -- not granular enough + +#### frontend/app/api/changelog/overview/[changelogType]/[[...options]]/route.ts + +1. Handles changelog history requests +2. Populates dashboard page's recent changes component + +#### frontend/app/api/cmprevalidation/[dataType]/[[...slugs]]/route.ts + +1. Queries placed reworked to correctly integrate with updated schema system +2. HTTPResponses status code macro applied to replace hardcoded status codes + +#### frontend/app/api/details/cmid/route.ts + +1. Query reworked to correctly integrate with updated schema system + +#### frontend/app/api/fetchall/[[...slugs]]/route.ts + +1. System reworked to ensure compatibility with all implemented RDS types. +2. buildQuery function revamped accordingly + +#### frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts + +1. Formatting applied + +###### GET + +1. FixedData cases' queries updated to correctly work with updated schemas +2. New tables/cases added: + 1. `personnelrole` + 2. `sitespecificvalidations` + 3. `roles` + 4. `measurementssummary` + 5. `viewfulltable` + +###### POST + +1. insertIDs object created to return IDs of inserted rows +2. `view` query config implementation updated +3. other cases have been updated to more efficiently integrate into insertIDs system + +###### PATCH + +1. similarly updated in line with POST endpoint + +###### DELETE + +1. similarly updated in line with POST endpoint + +#### frontend/app/api/postvalidation/route.ts + +1. Postvalidation summary statistics calculation endpoint +2. Statistics queries: + 1. `number of records by quadrat` + 2. `all stem records by quadrat (count only)` + 3. `live stem records by quadrat (count only)` + 4. `tree records by quadrat (count only)` + 5. `number of dead or missing stems by census` + 6. `trees outside of plot limits` + 7. `stems with largest DBH/HOM measurements by species` + 8. `all trees that were recorded in last census that are NOT in current census` + 9. `number of new stems per quadrat per census` + 10. `quadrats with most and least new stems per census` + 11. 
`number of dead stems per quadrat per census` + 12. `number of dead stems per species per census` + +#### frontend/app/api/refreshviews/[view]/[schema]/route.ts + +1. triggers materialized view table refresh + +#### frontend/app/api/rollover/[dataType]/[[...slugs]]/route.ts + +1. Census rollover handler +2. Uses NextJS's dynamic routing system to allow for dynamic query placement and execution + +#### frontend/app/api/specieslimits/[speciesID]/route.ts + +1. Species-set limits handler +2. Currently only adding/updating species limits is supported -- actual interaction system is still in progress. + +#### frontend/app/api/sqlmonitor/route.ts + +1. monitoring endpoint for SQL state, added for debugging/testing purposes + +#### frontend/app/api/structure/[schema]/route.ts + +1. returns structure of a given schema (by table name/column name) + +#### frontend/app/api/validations/crud/route.ts + +1. interface point for validations page +2. not yet complete, but will eventually allow for CRUD operations on validations + +#### frontend/app/api/validations/procedures/[validationType]/route.ts + +1. revised handler to process validation procedures + +#### frontend/app/api/validations/updatepassedvalidations/route.ts + +1. post-validation IsValidated field update system +2. revised from previous iteration to instead use generic utils.ts function + +#### frontend/app/api/validations/validationerrordisplay/route.ts + +1. returns a list of existing validation errors organized by CoreMeasurementID -- these are then used to correlate + viewed rows with validation errors if they exist + +#### frontend/app/api/validations/validationlist/route.ts + +1. returns a list of existing validations +2. additional processing incorporated for customized site-specific validations, but this is not yet being used anywhere. + +--- + +### Context Updates + +#### Lock Animation Context + +1. increased timeout delay from 3 seconds to 5 seconds + +--- + +### Components Updates + +#### DataGrid Columns + +1. all applicable datagrid usages' column array instances have been centralized here +2. additional formatting applied to allow customized column header formatting +3. customized cell and edit cell rendering added +4. some exceptions exist -- for instances where specific additional handling is needed, column states are directly + defined in the datagrid components themselves. + 1. `alltaxonomiesview` -- specieslimits column customized addition + +#### GitHub Feedback Modal + +1. allows users to submit github issue creation requests directly from modal +2. provides categories for selection and attaches additional metadata to newly created issue + +#### Rollover Modal + +1. allows users to customize new census creation +2. users can select to rollover personnel, quadrats, or neither +3. further customized selection of which personnel/quadrats are being rolled over + +#### Rollover Stems Modal + +1. template system to allow direct rollover of stems information +2. proof of concept more so than anything else + +#### Validation Modal + +1. additional proof of concept system +2. attempt at creating a dedicated modal for validation of rows that were missed during validation (for example, in the + event of data loss or some other connection failure event) +3. not currently used anywhere + +#### DataGrid Updates + +1. 
The DataGridCommons generic datagrid instance has been replaced by the IsolatedDataGridCommons instance, which + isolates as much information as possible to the generic instance rather than the existing DataGridCommons, which + requires parameter drilling of all MUI X DataGrid parameters. Current datagrids using this new implementation are: + - `alltaxonomiesview` + - `attributes` + - `personnel` + - `quadratpersonnel` + - `quadrats` + - `roles` + - `stemtaxonomiesview` +2. found that attempting to use typescript runtime utilities to create "default" initial states for each RDS type was + causing cascading failures. Due to the way that runtime utility functions work, no data was actually reaching the + datagrids importing those initial states + 1. replaced with manual definition of initial states -- planning on centralizing this to another place, similar to + the `datagridcolumns.tsx` file +3. `measurementssummaryview` datagrid instance added as a replacement to the previously defined summary page + +#### Re-Entry Data Modal + +1. data re-entry system has been reworked to allow customized field selection for repetition (can thus remove foreign + key reference columns from reentry) +2. `clusters` and `hiddenColumns` added -- the `clusters` object provides grouping options for reentry (preventing + reentry fields from rendering in a single very very long line) and the `hiddenColumns` object allows columns that + shouldn't need re-entry to + be omitted. + +#### ProcessPersonnel Handler + +1. new customized personnel upload handler, now that a `roles` table has been created and `CensusID` and `RoleID` + foreign keys have been added to the `personnel` table +2. attempts insert or update of roles data, and retrieves either insertID or existing ID +3. retrieved RoleID is used to insert or update `personnel` table next + +#### Theme Updates + +1. JoyUI Tooltip's default instance has been customized - touch listener has been disabled, leaveDelay has been set to + 100ms and pointerEvents has been set to `none` + +--- + +### Upload System Updates + +1. autocompletion system has been enhanced and reworked to auto-fill unit values and area dimensions. Genus/Species + breakdown has also been simplified, but needs updating +2. additional toggle added to datagrid display at the review stage to apply autocompletion changes. User can choose not + to if desired +3. countdown timer has been removed +4. validations -- update validation stage has been removed. validation toggle now automatically triggers on completion + of all validation procedures +5. validation system has also been fully overhauled -- instead of stored procedures, validation procedures are now + stored as dedicated queries in the `catalog.validationprocedures` table. When validations are run, the respective SQL + query is pulled from the table. This makes the system flexible and modifiable, as stored procedures are + significantly harder to modify (a sketch of this flow follows this list). +6. materialized view tables -- `measurementssummary` and `viewfulltable`. Due to extended query time for the existing + SQL views and the lack of support for materialized views in MySQL 8, a manual version was implemented. These + dedicated tables are populated via stored procedure and have significantly reduced query time. Upload system has been + enhanced to call both table refresh commands as part of the Azure upload process. +7. materialized view reload has been adjusted to be optional. The user should be able to continue the process even if one or + more of the views fails.
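+
+A minimal sketch of the item 5 flow above (not the actual implementation): validation queries are read from `catalog.validationprocedures` and executed with the existing `getConn`/`runQuery` helpers. The column names (`ValidationID`, `Definition`, `IsEnabled`) and the `${schema}` placeholder convention are assumptions for illustration only.
+
+```typescript
+import { getConn, runQuery } from '@/components/processors/processormacros';
+
+// Sketch only -- column names and the ${schema} placeholder are assumed, not confirmed.
+export async function runCatalogValidations(schema: string): Promise<void> {
+  const conn = await getConn();
+  try {
+    // Pull the enabled validation queries instead of invoking stored procedures.
+    const procedures = await runQuery(conn, 'SELECT ValidationID, Definition FROM catalog.validationprocedures WHERE IsEnabled = 1');
+    for (const procedure of procedures) {
+      // Each Definition is a plain SQL statement templated against the target schema,
+      // which keeps the validation set editable without redefining stored procedures.
+      const sql = procedure.Definition.replace(/\$\{schema\}/g, schema);
+      await runQuery(conn, sql);
+    }
+  } finally {
+    conn.release();
+  }
+}
+```
+
+In the real upload flow the executed results also drive the `IsValidated` updates described under the API changes, but that wiring is omitted here.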
+ +--- + +### SQL Updates + +1. Schema has been updated -- new tables added: + 1. `roles` - outlines user roles + 2. `specieslimits` - allows setting min/max bounds on measurements + 3. `specimens` - recording specimen data (added on request by ForestGEO) + 4. `unifiedchangelog` - partitioned table that tracks all changes to all tables in schema. All tables have triggers + that automatically update the `unifiedchangelog` on every change + 5. `sitespecificvalidations` - for specific validations applicable only to the host site +2. validation stored procedures have been deprecated and removed, replaced with `validationprocedures` and + `sitespecificvalidations` tables +3. migration script set has been completed and tested +4. trigger definitions have been recorded +5. view implementations have been updated + diff --git a/frontend/README.md b/frontend/README.md index bf5be152..bafe24a1 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,5 +1,6 @@ # The ForestGEO Data Entry App +liquibase generate-changelog --exclude-objects="\b\w*view\w*\b" A cloud-native web application built to accelerate the pace of research for the Smithsonian Institution's Forest Global Earth Observatory (ForestGEO). ForestGEO is a global forest research network, unparalleled in size and scope, comprised of ecologists and research sites dedicated to @@ -31,12 +32,12 @@ documentation [here](https://github.com/ForestGeoHack/ForestGEO/wiki/ForestGEO-A 1. Before running the project, you must create an `.env.local` file in the overhead directory with the following values: - - `AZURE_AD_CLIENT_ID` - - `AZURE_AD_CLIENT_SECRET` - - `AZURE_AD_TENANT_ID` - - `NEXTAUTH_SECRET` - - `NEXTAUTH_URL` - - all `AZURE_` values must be created/populated from Azure's App Registration portal + - `AZURE_AD_CLIENT_ID` + - `AZURE_AD_CLIENT_SECRET` + - `AZURE_AD_TENANT_ID` + - `NEXTAUTH_SECRET` + - `NEXTAUTH_URL` + - all `AZURE_` values must be created/populated from Azure's App Registration portal 2. Once `.env.local` is made, run `npm install` from the overhead directory to install dependencies 3. Run `npm run build` to compile/optimize the application for running 4. Run `npm run dev` to create a dev instance of the application locally on your machine @@ -64,12 +65,12 @@ In order from left to right, please note the following points of interest: - `[plotKey]`: this is the first required variable when accessing this endpoint -- you will have to add some string `plotKey` to the end of the URL: `.../browse/[your plot key]` in order to successfully view the page. - - wrapping a folder in `[]` will designate that folder as a **required** dynamic parameter - - wrapping in `[...folderName]` designates `folderName` as a catch-all route. All following - values after `folderName` (i.e., `.../a/b` will return `folderName = [a, b]` ) - - wrapping in `[[...folderName]]` designates `folderName` as an *optional* catch-all route. As - expected, all values for/after `folderName` will be returned as part of the dynamic route, - but `undefined` will also be returned if no value is entered at all (instead of a 404 error) + - wrapping a folder in `[]` will designate that folder as a **required** dynamic parameter + - wrapping in `[...folderName]` designates `folderName` as a catch-all route. All following - values after `folderName` (i.e., `.../a/b` will return `folderName = [a, b]` ) - wrapping in `[[...folderName]]` designates `folderName` as an _optional_ catch-all route.
As + expected, all values for/after `folderName` will be returned as part of the dynamic route, + but `undefined` will also be returned if no value is entered at all (instead of a 404 error) - `[plotNum]`: second required variable when accessing this endpoint - your resulting endpoint will look like (example) `http://localhost:3000/browse/plotKey/plotNum`. @@ -78,21 +79,21 @@ In order from left to right, please note the following points of interest: ### Release Notes (v0.1.0): - endpoints have been added and routed to require a plot key/number combination for access - - initial state has been converted to new `Plot {key: 'none', num: 0}` instead of `''` + - initial state has been converted to new `Plot {key: 'none', num: 0}` instead of `''` - MUI JoyUI has been partially implemented as a replacement for MaterialUI. However, due to time limitations, MaterialUI has still been incorporated into converted sections from ForestGeoHack - - The current plan is to solely implement either NextUI or ChakraUI instead of either of these - options, and future updates will include this information. + - The current plan is to solely implement either NextUI or ChakraUI instead of either of these + options, and future updates will include this information. - `SelectPlotProps` has been removed and replaced with NextJS dynamic routing (each endpoint will dynamically retrieve plot information). Endpoints have been updated to reflect dynamic param-based retrieval - - The navigation bar has been updated to use useEffect to push live endpoint updates when the - plot is changed (if you are at an endpoint and the plot is changed, the page will be reloaded - to reflect that) + - The navigation bar has been updated to use useEffect to push live endpoint updates when the + plot is changed (if you are at an endpoint and the plot is changed, the page will be reloaded + to reflect that) - New components/moved-over information: - - `Fileuploadcomponents` --> css code has been udpated to be dark theme-friendly - - `FileList` --> moved over - - `Loginlogout` --> created component, login/logout process has been relegated to avatar icon - dropdown menu - - `Plotselection` --> partially created from SelectPlot, changed to utilize dynamic - routing/selection instead of requiring a new dropdown in each page \ No newline at end of file + - `Fileuploadcomponents` --> css code has been udpated to be dark theme-friendly + - `FileList` --> moved over + - `Loginlogout` --> created component, login/logout process has been relegated to avatar icon + dropdown menu + - `Plotselection` --> partially created from SelectPlot, changed to utilize dynamic + routing/selection instead of requiring a new dropdown in each page diff --git a/frontend/TESTING.md b/frontend/TESTING.md new file mode 100644 index 00000000..e69de29b diff --git a/frontend/__tests__/api/cmid.test.tsx b/frontend/__tests__/api/cmid.test.tsx deleted file mode 100644 index 30c82bbc..00000000 --- a/frontend/__tests__/api/cmid.test.tsx +++ /dev/null @@ -1,78 +0,0 @@ -import {describe, it, expect, vi} from 'vitest'; -import {GET} from '@/app/api/details/cmid/route'; -import {getConn, runQuery} from '@/components/processors/processormacros'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; - -vi.mock('@/components/processors/processormacros', () => ({ - getConn: vi.fn(), - runQuery: vi.fn(), -})); - -describe('GET /api/details/cmid', () => { - it('should return 200 and data if query is successful', async () => { - const mockData = [{ - 
CoreMeasurementID: 1, - PlotName: 'Plot 1', - QuadratName: 'Quadrat 1', - PlotCensusNumber: 1, - StartDate: '2023-01-01', - EndDate: '2023-01-31', - FirstName: 'John', - LastName: 'Doe', - SpeciesName: 'Species 1', - }]; - - const conn = { - query: vi.fn().mockResolvedValue([mockData]), - release: vi.fn(), - }; - - (getConn as jest.Mock).mockResolvedValue(conn); - (runQuery as jest.Mock).mockResolvedValue(mockData); - - const {req, res} = createMocks({ - method: 'GET', - url: 'http://localhost/api/details/cmid?cmid=1&schema=test_schema', - }); - - const mockReq = new NextRequest(req.url); - const response = await GET(mockReq); - - expect(response.status).toBe(200); - expect(await response.json()).toEqual(mockData.map(row => ({ - coreMeasurementID: row.CoreMeasurementID, - plotName: row.PlotName, - quadratName: row.QuadratName, - plotCensusNumber: row.PlotCensusNumber, - censusStart: row.StartDate, - censusEnd: row.EndDate, - personnelName: `${row.FirstName} ${row.LastName}`, - speciesName: row.SpeciesName, - }))); - }); - - it('should return 500 if there is a database error', async () => { - (getConn as jest.Mock).mockRejectedValue(new Error('Database error')); - - const {req, res} = createMocks({ - method: 'GET', - url: 'http://localhost/api/details/cmid?cmid=1&schema=test_schema', - }); - - const mockReq = new NextRequest(req.url); - - await expect(GET(mockReq)).rejects.toThrow('Database error'); - }); - - it('should return 400 if schema is not provided', async () => { - const {req, res} = createMocks({ - method: 'GET', - url: 'http://localhost/api/details/cmid?cmid=1', - }); - - const mockReq = new NextRequest(req.url); - - await expect(GET(mockReq)).rejects.toThrow('no schema variable provided!'); - }); -}); diff --git a/frontend/__tests__/api/cmprevalidation.test.tsx b/frontend/__tests__/api/cmprevalidation.test.tsx deleted file mode 100644 index cf23f13c..00000000 --- a/frontend/__tests__/api/cmprevalidation.test.tsx +++ /dev/null @@ -1,100 +0,0 @@ -import {describe, it, expect, vi} from 'vitest'; -import {GET} from '@/app/api/cmprevalidation/[dataType]/[[...slugs]]/route'; -import {createMocks} from 'node-mocks-http'; -import {getConn, runQuery} from '@/components/processors/processormacros'; -import {HTTPResponses} from '@/config/macros'; -import {NextRequest} from 'next/server'; - -vi.mock('@/components/processors/processormacros', () => ({ - getConn: vi.fn(), - runQuery: vi.fn(), -})); - -describe('GET /api/cmprevalidation/[dataType]/[[...slugs]]', () => { - it('should return 412 if required tables are empty', async () => { - const conn = { - query: vi.fn().mockResolvedValue([[]]), - release: vi.fn(), - }; - - (getConn as jest.Mock).mockResolvedValue(conn); - (runQuery as jest.Mock).mockResolvedValue([]); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/cmprevalidation/attributes/schema/1/1', - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq, {params: {dataType: 'attributes', slugs: ['schema', '1', '1']}}); - - expect(response.status).toBe(HTTPResponses.PRECONDITION_VALIDATION_FAILURE); - }); - - it('should return 200 if required tables are populated', async () => { - const conn = { - query: vi.fn().mockResolvedValue([[1]]), - release: vi.fn(), - }; - - (getConn as jest.Mock).mockResolvedValue(conn); - (runQuery as jest.Mock).mockResolvedValue([[1]]); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/cmprevalidation/attributes/schema/1/1', - }); - - const mockReq = new 
NextRequest(req.url); - - const response = await GET(mockReq, {params: {dataType: 'attributes', slugs: ['schema', '1', '1']}}); - - expect(response.status).toBe(HTTPResponses.OK); - }); - - it('should return 412 if there is a database error', async () => { - (getConn as jest.Mock).mockRejectedValue(new Error('Database error')); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/cmprevalidation/attributes/schema/1/1', - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq, {params: {dataType: 'attributes', slugs: ['schema', '1', '1']}}); - - expect(response.status).toBe(HTTPResponses.PRECONDITION_VALIDATION_FAILURE); - }); - - it('should return 400 if slugs are missing', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/cmprevalidation/attributes', - }); - - const mockReq = new NextRequest(req.url); - - try { - await GET(mockReq, {params: {dataType: 'attributes', slugs: []}}); - } catch (e) { - expect((e as Error).message).toBe('incorrect slugs provided'); - } - }); - - it('should return 400 if slugs are incorrect', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/cmprevalidation/attributes/schema', - }); - - const mockReq = new NextRequest(req.url); - - try { - await GET(mockReq, {params: {dataType: 'attributes', slugs: ['schema']}}); - } catch (e) { - expect((e as Error).message).toBe('incorrect slugs provided'); - } - }); -}); diff --git a/frontend/__tests__/api/fetchall.test.tsx b/frontend/__tests__/api/fetchall.test.tsx deleted file mode 100644 index 91743645..00000000 --- a/frontend/__tests__/api/fetchall.test.tsx +++ /dev/null @@ -1,94 +0,0 @@ -import {describe, it, expect, vi, beforeEach} from 'vitest'; -import {GET} from '@/app/api/fetchall/[[...slugs]]/route'; -import {getConn, runQuery} from '@/components/processors/processormacros'; -import MapperFactory, {IDataMapper} from '@/config/datamapper'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; - -// Mocking getConn and runQuery functions -vi.mock('@/components/processors/processormacros', () => ({ - getConn: vi.fn(), - runQuery: vi.fn() -})); - -// Mocking MapperFactory -vi.mock('@/config/datamapper', () => ({ - default: { - getMapper: vi.fn() - } -})); - -describe('GET /api/fetchall/[[...slugs]]', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should return 500 if schema is not provided', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/fetchall/plots' - }); - - const mockReq = new NextRequest(req.url); - - await expect(GET(mockReq, {params: {slugs: ['plots']}})).rejects.toThrow('Schema selection was not provided to API endpoint'); - }); - - it('should return 500 if fetchType is not provided', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/fetchall?schema=test_schema' - }); - - const mockReq = new NextRequest(req.url); - - await expect(GET(mockReq, {params: {slugs: []}})).rejects.toThrow('fetchType was not correctly provided'); - }); - - it('should return 200 and data if query is successful', async () => { - const mockConn = {release: vi.fn()}; - (getConn as ReturnType).mockResolvedValue(mockConn); - const mockResults = [{PlotID: 1, PlotName: 'Plot 1'}]; - (runQuery as ReturnType).mockResolvedValue(mockResults); - - const mockMapper: IDataMapper = { - mapData: vi.fn().mockReturnValue([{plotID: 1, plotName: 'Plot 1'}]), - demapData: vi.fn() - }; - 
(MapperFactory.getMapper as ReturnType).mockReturnValue(mockMapper); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/fetchall/plots?schema=test_schema' - }); - - const mockReq = new NextRequest(req.url); - const response = await GET(mockReq, {params: {slugs: ['plots']}}); - - expect(response.status).toBe(200); - const data = await response.json(); - expect(data).toEqual([{plotID: 1, plotName: 'Plot 1'}]); - expect(getConn).toHaveBeenCalled(); - expect(runQuery).toHaveBeenCalledWith(mockConn, expect.stringContaining('SELECT')); - expect(mockMapper.mapData).toHaveBeenCalledWith(mockResults); - expect(mockConn.release).toHaveBeenCalled(); - }); - - it('should return 500 if there is a database error', async () => { - const mockConn = {release: vi.fn()}; - (getConn as ReturnType).mockResolvedValue(mockConn); - (runQuery as ReturnType).mockRejectedValue(new Error('Database error')); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/fetchall/plots?schema=test_schema' - }); - - const mockReq = new NextRequest(req.url); - - await expect(GET(mockReq, {params: {slugs: ['plots']}})).rejects.toThrow('Call failed'); - expect(getConn).toHaveBeenCalled(); - expect(runQuery).toHaveBeenCalledWith(mockConn, expect.stringContaining('SELECT')); - expect(mockConn.release).toHaveBeenCalled(); - }); -}); diff --git a/frontend/__tests__/api/filehandlers/deletefile.test.tsx b/frontend/__tests__/api/filehandlers/deletefile.test.tsx deleted file mode 100644 index f8ad99f2..00000000 --- a/frontend/__tests__/api/filehandlers/deletefile.test.tsx +++ /dev/null @@ -1,85 +0,0 @@ -import {describe, it, expect, vi, beforeEach} from 'vitest'; -import {DELETE} from '@/app/api/filehandlers/deletefile/route'; -import {getContainerClient} from '@/config/macros/azurestorage'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; - -vi.mock('@/config/macros/azurestorage', () => ({ - getContainerClient: vi.fn() -})); - -describe('DELETE /api/filehandlers/deletefile', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should return 400 if container name or filename is missing', async () => { - const {req} = createMocks({ - method: 'DELETE', - url: 'http://localhost/api/filehandlers/deletefile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await DELETE(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('Container name and filename are required'); - }); - - it('should return 400 if container client creation fails', async () => { - (getContainerClient as ReturnType).mockResolvedValue(null); - - const {req} = createMocks({ - method: 'DELETE', - url: 'http://localhost/api/filehandlers/deletefile?container=testContainer&filename=testFile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await DELETE(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('Container name and filename are required'); - }); - - it('should return 200 and delete the file if successful', async () => { - const mockBlobClient = { - delete: vi.fn().mockResolvedValue({}) - }; - const mockContainerClient = { - getBlobClient: vi.fn().mockReturnValue(mockBlobClient) - }; - - (getContainerClient as ReturnType).mockResolvedValue(mockContainerClient); - - const {req} = createMocks({ - method: 'DELETE', - url: 'http://localhost/api/filehandlers/deletefile?container=testContainer&filename=testFile' - }); - - const 
mockReq = new NextRequest(req.url); - - const response = await DELETE(mockReq); - expect(response.status).toBe(200); - const data = await response.text(); - expect(data).toBe('File deleted successfully'); - expect(mockBlobClient.delete).toHaveBeenCalled(); - }); - - it('should return 500 if there is an error', async () => { - (getContainerClient as ReturnType).mockRejectedValue(new Error('Test error')); - - const {req} = createMocks({ - method: 'DELETE', - url: 'http://localhost/api/filehandlers/deletefile?container=testContainer&filename=testFile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await DELETE(mockReq); - expect(response.status).toBe(500); - const data = await response.text(); - expect(data).toBe('Test error'); - }); -}); diff --git a/frontend/__tests__/api/filehandlers/downloadallfiles.test.tsx b/frontend/__tests__/api/filehandlers/downloadallfiles.test.tsx deleted file mode 100644 index 30f07d12..00000000 --- a/frontend/__tests__/api/filehandlers/downloadallfiles.test.tsx +++ /dev/null @@ -1,108 +0,0 @@ -import {describe, it, expect, vi, beforeEach} from 'vitest'; -import {GET} from '@/app/api/filehandlers/downloadallfiles/route'; -import {getContainerClient} from '@/config/macros/azurestorage'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; - -vi.mock('@/config/macros/azurestorage', () => ({ - getContainerClient: vi.fn() -})); - -describe('GET /api/filehandlers/downloadallfiles', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should return 400 if plot or census is not provided', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadallfiles' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('Both plot and census parameters are required'); - }); - - it('should return 400 if container client creation fails', async () => { - (getContainerClient as ReturnType).mockResolvedValue(null); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadallfiles?plot=testPlot&census=testCensus' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(400); - const data = await response.json(); - expect(data.statusText).toBe('Container client creation error'); - }); - - it('should return 200 and list of blobs if successful', async () => { - const mockContainerClient = { - listBlobsFlat: vi.fn().mockImplementation(function* () { - yield { - name: 'testBlob', - metadata: { - user: 'testUser', - FormType: 'testFormType', - FileErrorState: JSON.stringify([{stemtag: 'testStemtag', tag: 'testTag', validationErrorID: 1}]) - }, - properties: { - lastModified: new Date() - } - }; - }) - }; - - (getContainerClient as ReturnType).mockResolvedValue(mockContainerClient); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadallfiles?plot=testPlot&census=testCensus' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(200); - const data = await response.json(); - expect(data.responseMessage).toBe('List of files'); - expect(data.blobData).toHaveLength(1); - expect(data.blobData[0]).toEqual({ - key: 1, - name: 'testBlob', - user: 'testUser', - formType: 'testFormType', - fileErrors: [{stemtag: 'testStemtag', 
tag: 'testTag', validationErrorID: 1}], - date: expect.any(String) // Date will be serialized to a string - }); - }); - - it('should return 400 if there is an error in blob listing', async () => { - const mockContainerClient = { - listBlobsFlat: vi.fn().mockImplementation(() => { - throw new Error('Blob listing error'); - }) - }; - - (getContainerClient as ReturnType).mockResolvedValue(mockContainerClient); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadallfiles?plot=testPlot&census=testCensus' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(400); - const data = await response.json(); - expect(data.message).toBe('Blob listing error'); - }); -}); diff --git a/frontend/__tests__/api/filehandlers/downloadfile.test.tsx b/frontend/__tests__/api/filehandlers/downloadfile.test.tsx deleted file mode 100644 index d4b726b2..00000000 --- a/frontend/__tests__/api/filehandlers/downloadfile.test.tsx +++ /dev/null @@ -1,112 +0,0 @@ -import {describe, it, expect, vi, beforeEach} from 'vitest'; -import {GET} from '@/app/api/filehandlers/downloadfile/route'; -import {getContainerClient} from '@/config/macros/azurestorage'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; -import { - BlobServiceClient, - generateBlobSASQueryParameters, - StorageSharedKeyCredential -} from '@azure/storage-blob'; - -vi.mock('@azure/storage-blob', async () => { - const actual = await vi.importActual('@azure/storage-blob'); - return { - ...actual, - BlobServiceClient: { - fromConnectionString: vi.fn() - }, - generateBlobSASQueryParameters: vi.fn() - }; -}); - -vi.mock('@/config/macros/azurestorage', () => ({ - getContainerClient: vi.fn() -})); - -describe('GET /api/filehandlers/downloadfile', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - it('should return 400 if container name, filename, or storage connection string is missing', async () => { - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadfile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('Container name, filename, and storage connection string are required'); - }); - - it('should return 400 if container client creation fails', async () => { - process.env.AZURE_STORAGE_CONNECTION_STRING = 'test-connection-string'; - (getContainerClient as ReturnType).mockResolvedValue(null); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadfile?container=testContainer&filename=testFile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('Failed to get container client'); - }); - - it('should return 200 and SAS token URL if successful', async () => { - process.env.AZURE_STORAGE_CONNECTION_STRING = 'test-connection-string'; - - const mockContainerClient = { - getBlobClient: vi.fn().mockReturnValue({ - url: 'https://testaccount.blob.core.windows.net/testcontainer/testblob' - }) - }; - - (getContainerClient as ReturnType).mockResolvedValue(mockContainerClient); - - (BlobServiceClient.fromConnectionString as ReturnType).mockReturnValue({ - credential: new StorageSharedKeyCredential('testaccount', 'testkey') - }); - - (generateBlobSASQueryParameters 
as ReturnType).mockReturnValue({ - toString: () => 'sastoken' - }); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadfile?container=testContainer&filename=testFile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(200); - const data = await response.json(); - expect(data.url).toBe('https://testaccount.blob.core.windows.net/testcontainer/testblob?sastoken'); - }); - - it('should return 500 if there is an error', async () => { - process.env.AZURE_STORAGE_CONNECTION_STRING = 'test-connection-string'; - - (getContainerClient as ReturnType).mockRejectedValue(new Error('Test error')); - - const {req} = createMocks({ - method: 'GET', - url: 'http://localhost/api/filehandlers/downloadfile?container=testContainer&filename=testFile' - }); - - const mockReq = new NextRequest(req.url); - - const response = await GET(mockReq); - expect(response.status).toBe(500); - const data = await response.text(); - expect(data).toBe('Test error'); - }); -}); diff --git a/frontend/__tests__/api/filehandlers/storageload.test.tsx b/frontend/__tests__/api/filehandlers/storageload.test.tsx deleted file mode 100644 index cda3f901..00000000 --- a/frontend/__tests__/api/filehandlers/storageload.test.tsx +++ /dev/null @@ -1,116 +0,0 @@ -import {describe, it, expect, vi, beforeEach} from 'vitest'; -import {POST} from '@/app/api/filehandlers/storageload/route'; -import {getContainerClient, uploadValidFileAsBuffer} from '@/config/macros/azurestorage'; -import {createMocks} from 'node-mocks-http'; -import {NextRequest} from 'next/server'; - -vi.mock('@/config/macros/azurestorage', () => ({ - getContainerClient: vi.fn(), - uploadValidFileAsBuffer: vi.fn() -})); - -describe.skip('POST /api/filehandlers/storageload', () => { - beforeEach(() => { - vi.clearAllMocks(); - }); - - const createMockRequest = (url: string, formData: FormData) => { - const {req} = createMocks({ - method: 'POST', - url: url, - headers: { - 'Content-Type': 'multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' - } - }); - - if (formData.get('file') === null) { - console.log('createMockRequest: received empty formData: ', formData); - return new NextRequest(req.url!, {method: 'POST'}); - } - req.formData = async () => formData; - - const headers = new Headers(); - for (const [key, value] of Object.entries(req.headers)) { - headers.append(key, value as string); - } - - const body = `------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="file"; filename="testfile.txt"\r\nContent-Type: text/plain\r\n\r\ntest content\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--`; - - return new NextRequest(req.url!, {method: 'POST', headers, body}); - }; - - it('should return 500 if container client creation fails', async () => { - (getContainerClient as ReturnType).mockRejectedValue(new Error('Test error')); - - const formData = new FormData(); - formData.append('file', new File(['test content'], 'testfile.txt')); - - const mockReq = createMockRequest('http://localhost/api/filehandlers/storageload?fileName=testfile.txt&plot=testplot&census=testcensus&user=testuser&formType=testform', formData); - - const response = await POST(mockReq); - expect(response.status).toBe(500); - const data = await response.json(); - expect(data.responseMessage).toBe('Error getting container client.'); - expect(data.error).toBe('Test error'); - }); - - it('should return 500 if file upload fails', async () => { - (getContainerClient as 
ReturnType).mockResolvedValue({}); - (uploadValidFileAsBuffer as ReturnType).mockRejectedValue(new Error('Upload error')); - - const formData = new FormData(); - formData.append('file', new File(['test content'], 'testfile.txt')); - - const mockReq = createMockRequest('http://localhost/api/filehandlers/storageload?fileName=testfile.txt&plot=testplot&census=testcensus&user=testuser&formType=testform', formData); - - const response = await POST(mockReq); - expect(response.status).toBe(500); - const data = await response.json(); - expect(data.responseMessage).toBe('File Processing error'); - expect(data.error).toBe('Upload error'); - }); - - it('should return 200 if file upload is successful', async () => { - const mockUploadResponse = {requestId: '12345', _response: {status: 200}}; - (getContainerClient as ReturnType).mockResolvedValue({}); - (uploadValidFileAsBuffer as ReturnType).mockResolvedValue(mockUploadResponse); - - const formData = new FormData(); - formData.append('file', new File(['test content'], 'testfile.txt')); - - const mockReq = createMockRequest('http://localhost/api/filehandlers/storageload?fileName=testfile.txt&plot=testplot&census=testcensus&user=testuser&formType=testform', formData); - - const response = await POST(mockReq); - expect(response.status).toBe(200); - const data = await response.json(); - expect(data.message).toBe('Insert to Azure Storage successful'); - }); - - it('should return 400 if file is missing', async () => { - const formData = new FormData(); - console.log('test formData: ', formData); - - const mockReq = createMockRequest('http://localhost/api/filehandlers/storageload?fileName=testfile.txt&plot=testplot&census=testcensus&user=testuser&formType=testform', formData); - - const response = await POST(mockReq); - expect(response.status).toBe(400); - const data = await response.text(); - expect(data).toBe('File is required'); - }); - - it('should return 500 for unknown errors', async () => { - (getContainerClient as ReturnType).mockResolvedValue({}); - (uploadValidFileAsBuffer as ReturnType).mockRejectedValue('Unknown error'); - - const formData = new FormData(); - formData.append('file', new File(['test content'], 'testfile.txt')); - - const mockReq = createMockRequest('http://localhost/api/filehandlers/storageload?fileName=testfile.txt&plot=testplot&census=testcensus&user=testuser&formType=testform', formData); - - const response = await POST(mockReq); - expect(response.status).toBe(500); - const data = await response.json(); - expect(data.responseMessage).toBe('File Processing error'); - expect(data.error).toBe('Unknown error'); - }); -}); diff --git a/frontend/__tests__/dashboard.test.tsx b/frontend/__tests__/dashboard.test.tsx new file mode 100644 index 00000000..9e682f10 --- /dev/null +++ b/frontend/__tests__/dashboard.test.tsx @@ -0,0 +1,84 @@ +import { render, screen } from '@testing-library/react'; +import { beforeAll, beforeEach, describe, expect, it, Mock, vi } from 'vitest'; +import DashboardPage from '@/app/(hub)/dashboard/page'; +import { useSession } from 'next-auth/react'; +import '@testing-library/jest-dom/vitest'; +import { LockAnimationProvider } from '@/app/contexts/lockanimationcontext'; + +// Mock the useSession hook +vi.mock('next-auth/react', () => ({ + useSession: vi.fn() +})); + +// Define a mock session object +const mockSession = { + user: { + name: 'John Doe', + email: 'john.doe@example.com', + userStatus: 'admin', + sites: [ + { schemaName: 'site1', siteName: 'Site 1' }, + { schemaName: 'site2', siteName: 'Site 2' } + ] + 
} +}; + +describe.skip('DashboardPage Component', () => { + // Mock the authenticated session before all tests + beforeAll(() => { + (useSession as Mock).mockReturnValue({ data: mockSession, status: 'authenticated' }); + }); + + beforeEach(() => { + // Reset mocks before each test + vi.clearAllMocks(); + }); + + const renderWithProvider = () => { + return render( + + + + ); + }; + + it("displays the user's name", () => { + renderWithProvider(); + + // Assert that the user's name is displayed + expect(screen.getByText(/Welcome, John Doe!/i)).toBeInTheDocument(); + }); + + it("displays the user's email", () => { + renderWithProvider(); + + // Assert that the user's email is displayed + // To handle multiple instances of "Registered Email:" + const emails = screen.getAllByText(/Registered Email:/i); + expect(emails).length.greaterThanOrEqual(1); // Expect only one occurrence or handle all + expect(emails[0]).toBeInTheDocument(); + expect(screen.getByText(/john.doe@example.com/i)).toBeInTheDocument(); + }); + + it("displays the user's permission status", () => { + renderWithProvider(); + + // Same for "Assigned Role:" + const roles = screen.getAllByText(/Assigned Role:/i); + expect(roles).length.greaterThanOrEqual(1); // Handle according to your use case + expect(roles[0]).toBeInTheDocument(); + + expect(screen.getByText(/global/i)).toBeInTheDocument(); + }); + + it('displays the list of allowed sites', () => { + renderWithProvider(); + + // Assert that the allowed sites are displayed + const sites = screen.getAllByText(/You have access to the following sites:/i); + expect(sites).length.greaterThanOrEqual(1); // Handle according to your use case + expect(sites[0]).toBeInTheDocument(); + expect(screen.getByText(/Site 1/i)).toBeInTheDocument(); + expect(screen.getByText(/Site 2/i)).toBeInTheDocument(); + }); +}); diff --git a/frontend/__tests__/login.test.tsx b/frontend/__tests__/login.test.tsx new file mode 100644 index 00000000..33135b1d --- /dev/null +++ b/frontend/__tests__/login.test.tsx @@ -0,0 +1,59 @@ +import { render, screen } from '@testing-library/react'; +import { beforeAll, beforeEach, describe, expect, it, Mock, vi } from 'vitest'; +import LoginPage from '@/app/(login)/login/page'; +import { useSession } from 'next-auth/react'; +import { redirect } from 'next/navigation'; +import '@testing-library/jest-dom/vitest'; + +// Mock the useSession hook and next/navigation functions +vi.mock('next-auth/react', () => ({ + useSession: vi.fn() +})); + +vi.mock('next/navigation', () => ({ + redirect: vi.fn() +})); + +// Mock the UnauthenticatedSidebar component +vi.mock('@/components/unauthenticatedsidebar', () => ({ + default: () =>
Unauthenticated Sidebar
+})); + +// Define a mock session object to use across tests +const mockSession = { + user: { + email: 'user@example.com', + userStatus: 'admin', + sites: [{ name: 'Site 1' }, { name: 'Site 2' }], + allsites: [{ name: 'Site 1' }, { name: 'Site 2' }] + } +}; + +describe('LoginPage Component with authenticated session', () => { + // Set up the mock authenticated session once for all tests + beforeAll(() => { + (useSession as Mock).mockReturnValue({ data: mockSession, status: 'authenticated' }); + }); + + beforeEach(() => { + // Reset mocks before each test + vi.clearAllMocks(); + }); + + it('redirects to dashboard when the user is authenticated', () => { + render(); + + // Assert that redirect was called to navigate to the dashboard + expect(redirect).toHaveBeenCalledWith('/dashboard'); + }); + + // Add more tests here that assume the user is authenticated + it('does not render the unauthenticated sidebar when the user is authenticated', () => { + render(); + + // Assert that the unauthenticated sidebar is not present + expect(screen.queryByTestId('unauthenticated-sidebar')).not.toBeInTheDocument(); + }); + + // Additional tests can go here, all assuming the user is already logged in... +}); diff --git a/frontend/__tests__/loginpage.test.tsx b/frontend/__tests__/loginpage.test.tsx deleted file mode 100644 index 347b29fa..00000000 --- a/frontend/__tests__/loginpage.test.tsx +++ /dev/null @@ -1,49 +0,0 @@ -// loginPage.test.tsx - -import {render, screen} from '@testing-library/react'; -import {describe, it, vi, beforeEach, Mock, expect} from 'vitest'; -import LoginPage from '@/app/(login)/login/page'; -import {useSession} from 'next-auth/react'; -import {redirect} from 'next/navigation'; -import '@testing-library/jest-dom/vitest'; - -// Mock the useSession hook and next/navigation functions -vi.mock('next-auth/react', () => ({ - useSession: vi.fn(), -})); - -vi.mock('next/navigation', () => ({ - redirect: vi.fn(), -})); - -// Mock the UnauthenticatedSidebar component -vi.mock('@/components/unauthenticatedsidebar', () => ({ - default: () =>
Unauthenticated Sidebar
, -})); - -describe('LoginPage Component', () => { - beforeEach(() => { - // Reset mocks before each test - vi.clearAllMocks(); - }); - - it('renders the unauthenticated sidebar if the user is unauthenticated', () => { - // Mock unauthenticated status - (useSession as Mock).mockReturnValue({data: null, status: 'unauthenticated'}); - - render(); - - // Assert that the sidebar is present and visible - expect(screen.getByTestId('unauthenticated-sidebar')).toBeInTheDocument(); - }); - - it('redirects to dashboard if the user is authenticated', () => { - // Mock authenticated status - (useSession as Mock).mockReturnValue({data: {user: {}}, status: 'authenticated'}); - - render(); - - // Assert that redirect was called to navigate to the dashboard - expect(redirect).toHaveBeenCalledWith('/dashboard'); - }); -}); diff --git a/frontend/__tests__/sidebar.test.tsx b/frontend/__tests__/sidebar.test.tsx new file mode 100644 index 00000000..a330c7f6 --- /dev/null +++ b/frontend/__tests__/sidebar.test.tsx @@ -0,0 +1,167 @@ +// Mock ResizeObserver +class ResizeObserver { + observe() {} + + unobserve() {} + + disconnect() {} +} + +global.ResizeObserver = ResizeObserver; + +import { render, screen } from '@testing-library/react'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import Sidebar from '@/components/sidebar'; +import { useSession } from 'next-auth/react'; +import { useOrgCensusContext, usePlotContext, useSiteContext } from '@/app/contexts/userselectionprovider'; +import { useOrgCensusListContext, usePlotListContext, useSiteListContext } from '@/app/contexts/listselectionprovider'; +import '@testing-library/jest-dom/vitest'; +import { UserAuthRoles } from '@/config/macros'; +import { Session } from 'next-auth/core/types'; +import { CensusDateRange } from '@/config/sqlrdsdefinitions/timekeeping'; + +// Mock the necessary hooks +vi.mock('next-auth/react', () => ({ + useSession: vi.fn() +})); + +vi.mock('@/app/contexts/userselectionprovider', () => ({ + useSiteContext: vi.fn(), + usePlotContext: vi.fn(), + useOrgCensusContext: vi.fn(), + useSiteDispatch: vi.fn(), + usePlotDispatch: vi.fn(), + useOrgCensusDispatch: vi.fn() +})); + +vi.mock('@/app/contexts/listselectionprovider', () => ({ + useSiteListContext: vi.fn(), + usePlotListContext: vi.fn(), + useOrgCensusListContext: vi.fn(), + useSiteListDispatch: vi.fn(), + usePlotListDispatch: vi.fn(), + useOrgCensusListDispatch: vi.fn() +})); + +vi.mock('next/navigation', async () => { + const actual = await vi.importActual('next/navigation'); + return { + ...actual, + useRouter: vi.fn().mockReturnValue({ + route: '/', + pathname: '/', + query: {}, + asPath: '/', + push: vi.fn(), + replace: vi.fn(), + back: vi.fn() + }), + usePathname: vi.fn().mockReturnValue('/mock-path'), + useSearchParams: vi.fn().mockReturnValue({ + get: vi.fn() + }) + }; +}); + +describe.skip('Sidebar Component', () => { + // Mock session data + const mockSession = { + user: { + name: 'John Doe', + email: 'john.doe@example.com', + userStatus: 'global' as UserAuthRoles, + sites: [ + { siteID: 1, siteName: 'Site 1', schemaName: 'schema1' }, + { siteID: 2, siteName: 'Site 2', schemaName: 'schema2' } + ], + allsites: [ + { siteID: 1, siteName: 'Site 1', schemaName: 'schema1' }, + { siteID: 2, siteName: 'Site 2', schemaName: 'schema2' } + ] + }, + expires: '9999-12-31T23:59:59.999Z' // Add this line to satisfy the 'Session' type + }; + + // Mock site, plot, and census contexts + const mockSite = { siteID: 1, siteName: 'Site 1', schemaName: 'schema1' }; + const mockPlot = 
{ plotID: 1, plotName: 'Plot 1', numQuadrats: 5 }; + const mockCensus = { + plotCensusNumber: 1, + dateRanges: [{ censusID: 1, startDate: new Date('2023-01-01'), endDate: new Date('2023-01-31') } as CensusDateRange], + plotID: 1, + censusIDs: [1, 2], + description: 'Test Census' + }; + const mockCensusList = [ + { + plotCensusNumber: 1, + dateRanges: [{ censusID: 1, startDate: new Date('2023-01-01'), endDate: new Date('2023-01-31') } as CensusDateRange], + plotID: 1, + censusIDs: [1, 2], + description: 'Test Census' + } + ]; + + beforeEach(() => { + vi.clearAllMocks(); + + // Mock session + vi.mocked(useSession).mockReturnValue({ + data: mockSession, + status: 'authenticated', + update: function (_data?: any): Promise { + throw new Error('Function not implemented.'); + } + }); + + // Mock contexts + vi.mocked(useSiteContext).mockReturnValue(mockSite); + vi.mocked(usePlotContext).mockReturnValue(mockPlot); + vi.mocked(useOrgCensusContext).mockReturnValue(mockCensus); + + // Mock list contexts + vi.mocked(useSiteListContext).mockReturnValue([mockSite]); + vi.mocked(usePlotListContext).mockReturnValue([mockPlot]); + vi.mocked(useOrgCensusListContext).mockReturnValue(mockCensusList); + }); + + it('renders the sidebar', async () => { + render(); + + // Check if the sidebar renders the user name and admin status + expect(screen.getByTestId('login-logout-component')).toBeInTheDocument(); + + // Check if the site, plot, and census dropdowns are rendered using data-testid + expect(screen.getByTestId('site-select-component')).toBeInTheDocument(); + expect(screen.getByTestId('plot-select-component')).toBeInTheDocument(); + expect(screen.getByTestId('census-select-component')).toBeInTheDocument(); + }); + + it('displays the selected site, plot, and census', async () => { + render(); + + // Check that the selected site, plot, and census are displayed correctly + expect(screen.getByTestId('selected-site-name')).toHaveTextContent('Site: Site 1'); + expect(screen.getByTestId('selected-plot-name')).toHaveTextContent('Plot: Plot 1'); + expect(screen.getByTestId('selected-census-plotcensusnumber')).toHaveTextContent('Census: 1'); + + // Check dates + expect(screen.getByTestId('selected-census-dates')).toHaveTextContent('First Record: Sun Jan 01 2023'); + expect(screen.getByTestId('selected-census-dates')).toHaveTextContent('Last Record: Tue Jan 31 2023'); + }); + + // it('opens the "Add New Census" modal when clicked', async () => { + // render(); + // + // // Find and click the "Add New Census" button + // const addCensusButton = screen.getByTestId('add-new-census-button'); + // expect(addCensusButton).toBeInTheDocument(); + // + // await act(async () => { + // fireEvent.click(addCensusButton); + // }); + // + // // Verify that the modal opens successfully using its test ID + // expect(screen.getByTestId('rollover-modal')).toBeInTheDocument(); + // }); +}); diff --git a/frontend/app/(hub)/dashboard/error.tsx b/frontend/app/(hub)/dashboard/error.tsx new file mode 100644 index 00000000..5ca1d123 --- /dev/null +++ b/frontend/app/(hub)/dashboard/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Dashboard + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/dashboard/page.tsx b/frontend/app/(hub)/dashboard/page.tsx index b367d8cb..5343d85f 100644 --- 
a/frontend/app/(hub)/dashboard/page.tsx +++ b/frontend/app/(hub)/dashboard/page.tsx @@ -1,4 +1,4 @@ -"use client"; +'use client'; import { Accordion, @@ -6,350 +6,309 @@ import { AccordionGroup, AccordionSummary, Box, - IconButton, + Card, + CardContent, + Chip, + Divider, List, ListItem, + ListItemContent, + ListSubheader, + Skeleton, + Stack, + Step, + Stepper, Tooltip, Typography -} from "@mui/joy"; +} from '@mui/joy'; +import HelpOutlineOutlinedIcon from '@mui/icons-material/HelpOutlineOutlined'; import WarningIcon from '@mui/icons-material/Warning'; -import TravelExploreIcon from '@mui/icons-material/TravelExplore'; -import Avatar from "@mui/joy/Avatar"; -import { CensusLogo, PlotLogo } from "@/components/icons"; -import { useOrgCensusContext, usePlotContext, useSiteContext } from "@/app/contexts/userselectionprovider"; +import CheckIcon from '@mui/icons-material/Check'; +import { useLockAnimation } from '@/app/contexts/lockanimationcontext'; +import { useSession } from 'next-auth/react'; +import { useOrgCensusContext, usePlotContext, useSiteContext } from '@/app/contexts/userselectionprovider'; +import { useEffect, useState } from 'react'; +import { UnifiedChangelogRDS } from '@/config/sqlrdsdefinitions/core'; +import moment from 'moment'; +import Avatar from '@mui/joy/Avatar'; export default function DashboardPage() { - + const { triggerPulse, isPulsing } = useLockAnimation(); + const { data: session } = useSession(); const currentSite = useSiteContext(); const currentPlot = usePlotContext(); const currentCensus = useOrgCensusContext(); + const userName = session?.user?.name; + const userEmail = session?.user?.email; + const userRole = session?.user?.userStatus; + const allowedSites = session?.user?.sites; + + const [changelogHistory, setChangelogHistory] = useState(Array(5)); + const [isLoading, setIsLoading] = useState(false); + + async function loadChangelogHistory() { + try { + setIsLoading(true); + + // Check if the required data is available, otherwise return a padded array + if (!currentSite || !currentPlot || !currentCensus) { + setChangelogHistory(Array(5).fill({})); + return; + } + + const response = await fetch( + `/api/changelog/overview/unifiedchangelog/${currentPlot?.plotID}/${currentCensus?.plotCensusNumber}?schema=${currentSite?.schemaName}`, + { method: 'GET' } + ); + const results: UnifiedChangelogRDS[] = await response.json(); + + // Pad the array if it has less than 5 items + const paddedResults = [...results]; + while (paddedResults.length < 5) { + paddedResults.push({}); // Push empty objects to pad the array + } + + setChangelogHistory(paddedResults); + } catch (error) { + console.error('Failed to load changelog history', error); + setChangelogHistory(Array(5).fill({})); // Fallback to an empty padded array in case of an error + } finally { + setIsLoading(false); + } + } + + useEffect(() => { + loadChangelogHistory().catch(console.error); + }, [currentSite, currentPlot, currentCensus]); - const attributeNote = "NOTE: If a code can be used for more than one status (e.g. 
The code “L” for a leaning tree, could\n" + - "apply to either a dead or alive stem), or if a code does not indicate any of the above status\n" + - "options, the status column should be left blank."; - const quadratNote = "NOTE: The x and y coordinates (“startx” and “starty”) refer to the distance in meters between\n" + - "the quadrat under question and lowest, left-most corner of the entire plot (or\n" + - "wherever your plot origin, or 0,0 coordinates are)."; - const censusNote1 = "NOTE: Each of the multiple stems should be included in these files. You may indicate in the codes\n" + - "field which one is the main stem (if the tree has only one stem, you do not have to include the main\n" + - "stem code). The rest of the information should be repeated for each multiple stem. Make sure that\n" + - "the information (species code, date, etc.) is exactly the same for all multiple stems of the same tree. "; - const censusNote2 = "NOTE: The dataset for each census should only contain trees and stems that were tagged and\n" + - "measured from that census. The dataset for subsequent censuses should contain all live stems from\n" + - "the previous census. Dead or lost stems should have the appropriate codes to indicate their absence\n" + - "in subsequent censuses."; return ( - - Please use this guide to navigate through this - app's key features and functionalities. - Understanding the Sidebar - - The sidebar is intended to provide you with quick and easy access to the different features this app provides. - - - - - - - - - - - - - - Select a Site - - - - - Now that you have logged in, you will see the sidebar is currently empty, with the exception of a - clickable - button saying "Select Site"
- In order to fully access the website, you must select the site you are currently working in. - - - - -
- Once you have selected a site, you should see a second menu option slide out to enable you to select a - plot. + + + + + Welcome, {userName}! + + + + }> + + + Core Functions and Features + + + + + Use the selection menus to pick your site, plot, and census + + + + + + + The navigation menu will not become visible until you have selected a site, plot, and census. + + + + + + You will need to submit supporting data before being able to submit new measurements for your census. + + + + + Stem & Plot Details - Use this supporting menu to enter fixed data for your census. + + + + Stem & Plot Details + + + + + Stem Codes - Submit attribute information for stems here. Does not require a census. + + + + + + + Personnel - Submit personnel working in your census here. Requires a census. + + + + + + + Quadrats - Submit quadrat information for stems here. Requires a census. + + + + + + + Species List - Submit species and taxonomy information for stems here. Does not require a census. + + + + + + + Plot-Species List - See existing taxonomy information for stems in your plot and census here.{' '} + Requires a census. + + + + + + + + } onClick={triggerPulse}> + + This is a feedback form! + + + + + + + + + User-Specific Info -
-
- {currentSite !== undefined && ( - - - - - - - Select a Plot - - - - - Following the same format as the Site, clicking on the Select Plot link will open a dialog box to allow - you to select a plot. -
- After selecting a plot, you will see the navigation menu appear along with the census selection box. - However, it will remain disabled until you select a census! - - - - - -
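A note for orientation: the site and plot objects that these selections hand to the rest of the app are small records. The sketch below mirrors the mock objects used in the sidebar test earlier in this diff (siteID/siteName/schemaName and plotID/plotName/numQuadrats); the interface names themselves are illustrative, not the project's actual RDS type definitions.

// Sketch of the selection objects, mirroring the mocks in the sidebar test above.
// Interface names are illustrative; the real types may differ.
interface SiteSelection {
  siteID: number;
  siteName: string;
  schemaName: string; // each site maps to its own database schema
}

interface PlotSelection {
  plotID: number;
  plotName: string;
  numQuadrats: number;
}

const exampleSite: SiteSelection = { siteID: 1, siteName: 'Site 1', schemaName: 'schema1' };
const examplePlot: PlotSelection = { plotID: 1, plotName: 'Plot 1', numQuadrats: 5 };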
-
-
- )} - {currentSite !== undefined && currentPlot !== undefined && ( - - - - - - - Select a Census - - - - - Censuses are organized by Plot Census Number!
- Within each plot census number, you can create any number of date ranges, which are then associated with an internal census ID.
- Please note: when you are uploading or adding information, it will be added to the nearest OPEN census.
- You can only add information to an OPEN census!
- You can only have one open census at a time!
- Please note:
- Censuses cannot be updated or deleted. If you need to add revised information, please start a new census, add the respective information, - and close that census to add it to the overall plot census category.
- Please use the Reopen/Close Census option to open or close date ranges within the existing Plot Census Number.
- Please use the Start New Census button to create a new Plot Census Number, or an entirely new census. -
-
-
- )} -
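The census rules above can be summarized in a small sketch. Field names follow the mocked census objects in the sidebar test earlier in this diff; the isCensusOpen helper is a hypothetical illustration of one plausible reading of the open/closed rule, not the application's actual API.

// Sketch of the census structure described above. Field names mirror the
// mocked census objects in the sidebar test; `isCensusOpen` is hypothetical.
interface DateRangeSketch {
  censusID: number; // internal census ID attached to each date range
  startDate: Date;
  endDate?: Date;   // a still-open date range has no end date yet
}

interface OrgCensusSketch {
  plotID: number;
  plotCensusNumber: number;      // censuses are grouped under this number
  dateRanges: DateRangeSketch[]; // any number of date ranges per plot census number
  description?: string;
}

function isCensusOpen(census: OrgCensusSketch): boolean {
  // "You can only have one open census at a time": open here means some date
  // range has been started but not yet closed.
  return census.dateRanges.some(range => range.endDate === undefined);
}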
- {currentSite !== undefined && currentPlot !== undefined && currentCensus !== undefined && ( - <> - Navigating Through the Website - - - - Understanding the Site System - - - - When you first log in and select a site, plot, and census, you will see a series of loading screens appear.
- If you are starting a new census, you will see that the View Measurements button is disabled with a warning badge, - and that one or more of the Supporting Data Views menu links have a red danger badge attached.
- This is the Prevalidation system. You must populate all of the tables that have a red badge in order to be able to add measurements.
- You will also see a button in the bottom left corner that says "Reload Prevalidation". This button will manually re-check the respective tables - associated with the prevalidation system, in case the system does not automatically run after you add data to the system. -
-
-
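A minimal sketch of the gating this section describes, assuming a flags object along the lines of the UnifiedValidityFlags and useDataValidityContext names that appear elsewhere in this diff; the specific keys and the helper below are illustrative only.

// Sketch only: how prevalidation flags could gate the measurements view.
// The keys below are assumptions; the real UnifiedValidityFlags type
// referenced elsewhere in this diff may differ.
type ValidityFlagsSketch = {
  attributes: boolean;
  personnel: boolean;
  quadrats: boolean;
  species: boolean;
};

// View Measurements stays disabled until every supporting table is populated.
function canEnterMeasurements(validity: ValidityFlagsSketch): boolean {
  return Object.values(validity).every(Boolean);
}

// The "Reload Prevalidation" button simply re-runs these checks on demand,
// e.g. by calling something like recheckValidityIfNeeded() from the
// data-validity context used in this diff.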
- - - Measurements Hub - - - - The Measurements Hub contains two primary links &mdash; the View Measurements link and the View Uploaded Files link.
- You can use the View Measurements link to review uploaded information once it has been inserted, and you can use the - View Uploaded Files link to review previously uploaded files for the respective plot & census combination you are adding data to, as well as - delete or download them.
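The upload path behind this hub is exercised by the storageload tests at the top of this diff. Below is a hedged sketch of the client request those tests imply: the endpoint, query parameter names, and response shapes come from the tests, while the helper function itself is hypothetical.

// Sketch of the upload request implied by the storageload tests above.
// Query parameter names and response shapes come from those tests;
// the helper itself is hypothetical.
async function uploadCensusFile(file: File, plot: string, census: string, user: string, formType: string) {
  const formData = new FormData();
  formData.append('file', file);

  const params = new URLSearchParams({ fileName: file.name, plot, census, user, formType });
  const response = await fetch(`/api/filehandlers/storageload?${params.toString()}`, {
    method: 'POST',
    body: formData
  });

  if (response.status === 400) throw new Error(await response.text()); // 'File is required'
  if (!response.ok) {
    const { responseMessage, error } = await response.json(); // 'File Processing error' plus cause
    throw new Error(`${responseMessage}: ${error}`);
  }
  return response.json(); // e.g. { message: 'Insert to Azure Storage successful' }
}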

- The View Measurements link and upload system will be disabled until you successfully populate all of the supporting data views. - Once this is done and the prevalidation system reloads, you will be able to click on the View Measurements link. - Use the Upload button there to upload information conforming to the census.txt
+ }> + + + + Assigned Role: + + + {userRole} + + + }> + Is this incorrect? + + + + Registered Email: + + {userEmail} + + + + + + You have access to the following sites: - -
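The values shown on this card are read straight from the NextAuth session. The mocked sessions in the tests at the top of this diff suggest roughly the shape below; this is a sketch for orientation, not the project's actual next-auth type augmentation.

// Approximate session shape consumed by the dashboard, inferred from the
// mocked sessions in the tests above. Illustrative only.
interface SessionSiteSketch {
  siteID?: number;
  siteName: string;
  schemaName: string;
}

interface SessionSketch {
  user: {
    name: string;
    email: string;
    userStatus: string;             // e.g. 'admin' or 'global' in the test fixtures
    sites: SessionSiteSketch[];     // sites this user may access
    allsites?: SessionSiteSketch[]; // full site list, used as a fallback
  };
  expires: string;
}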
-
- - - Supporting Data Views - - - - The Supporting Data Views expands to allow you to modify the different moving parts of a - census that you would originally modify through CTFSWeb. - - - - Attributes - - - - These are the codes used by field personnel to describe the condition of a tree, stem or - measurement. These codes are locally derived and can be in any language. These tree - measurement codes will eventually be inserted into the TSMAttributes table, which is a permanent - table. - - - code: one or more letters that describe or explain the condition of a tree, stem, - or - measurement (e.g. “L”) - description: a free text description of the code (e.g. “leaning”) - - - status: one of six standardized terms used as a summary category for the code and the - condition of the stem which it describes: - - - - - - - - alive: the stem is alive - alive-not measured: the stem is alive but was not measured - dead: the ENTIRE TREE is dead - missing: field crews missed this stem, and it was not measured during the - census - - broken below: the stem was previously ≥ 1 cm dbh, but in this census was found alive - but broken off, now with a dbh less than 1 cm - - stem dead: the stem is dead and/or not found - - - e.g. We may call a tree in the field “MS;R” – MS (multiple stems) could have an - “alive” status on this table and R (description: resprout) would have “broken below.” - - - - - - - - Personnel - - - - This file contains the names of the people who are or were involved with the plot, as well as the - role - that they played. If a person has played more than one role (for example she was a field - technician in - one census, then promoted to field supervisor in a later census), then that name should be entered - twice. This file should have three columns, as designated below. - - - firstname: the first (given) name of the person - lastname: the last name (surname) of the person - role: the role the person played in the census. This should match exactly one of - the - descriptions in the role.txt file. - - - - - - Quadrats - - - - This file contains a complete list of all quadrats used in your plot. - - - quadrat: the name of the quadrat, e.g. 0002 - startx: the x coordinate of the lower left corner of the quadrat, e.g. - 0 - - starty: the y coordinate of the lower left corner of the quadrat, e.g. 40 - - - - - - - dimx: the x dimension of the quadrat (in meters), e.g. 20 - dimy: the y dimension of the quadrat (in meters), e.g. 20 - - - - - - Subquadrats (Optional) - - - - If needed, you can submit subquadrat information in a file format similar to the quadrats file.
- This is not needed in order to complete a census, BUT
- If you do not add or upload subquadrats and try to reference them in your census.txt file, the values will be IGNORED. -
- - {/* {label: "quadrat"}, {label: "xindex"}, {label: "yindex"}, {label: "unit"}, {label: "orderindex"}], */} - subquadrat: the name of the subquadrat - quadrat: the overhead quadrat it belongs to - dimx: the x-dimensions of the subquadrat (default is 5m) - dimy: the y-dimensions of the subquadrat (default is 5m) - xindex: starting x-coordinates (top left corner) of subquadrat - yindex: starting y-coordinates (top left corner) of subquadrat - unit: Please provide the SI unit (mm, cm, dm, m, Dm, hm, km); default is meters - orderindex: the order of the subquadrat within the quadrat at large, starting from top left corner - -
-
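Restating the supporting files described above (attributes, personnel, quadrats, and the optional subquadrats) as flat row shapes may help when preparing uploads. Column names follow the text of this section; the interface names and the sample row are illustrative, not the application's actual definitions.

// Illustrative row shapes for the supporting files described above.
// Column names follow the text; the identifiers themselves are assumptions.
type AttributeStatus = 'alive' | 'alive-not measured' | 'dead' | 'missing' | 'broken below' | 'stem dead';

interface AttributeRow {
  code: string;             // e.g. 'L'
  description: string;      // e.g. 'leaning'
  status?: AttributeStatus; // left blank when no single status applies
}

interface PersonnelRow {
  firstname: string;
  lastname: string;
  role: string; // must match a description in role.txt exactly
}

interface QuadratRow {
  quadrat: string; // e.g. '0002'
  startx: number;  // meters from the plot origin (0,0)
  starty: number;
  dimx: number;    // e.g. 20
  dimy: number;    // e.g. 20
}

interface SubquadratRow {
  subquadrat: string;
  quadrat: string; // parent quadrat
  dimx: number;    // default 5
  dimy: number;    // default 5
  xindex: number;
  yindex: number;
  unit: string;    // SI unit, e.g. 'm'
  orderindex: number;
}

const exampleQuadrat: QuadratRow = { quadrat: '0002', startx: 0, starty: 40, dimx: 20, dimy: 20 };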
- - - Species - - - - This file is integral to a key table in the system, so take time to review it for spelling errors, - etc. Make - sure the IDLevels are filled in. There should be at least one species code for unidentified - species if - your plot includes species not yet identified. There are four required columns (“spcode,” “genus,” - “species,” and “IDLevel”); the rest are optional. - - - - spcode: a code used in the field to identify the species of the tree - - Most ForestGEO sites use six letter codes where the first four are from the genus - name and - the last two are from the species. If two species yield the same code, then an alternative - letter or number as the last character may be used to differentiate them. For example, - codes for Shorea macroptera subsp. baillonii and Shorea macrophylla, would both be - SHORMA. The species codes ended up being SHORMB and SHORMC, respectively. - You should use a similar naming convention for each morphospecies incorporating - details - you know. For example, using LITSBL (Litsea “Big Leaf”) or APORS1 (Aporosa sp. 1) are - fine as long as each code applies to only one morphospecies. These can be changed once - identification is more complete - Other combinations are also acceptable. Some sites use 3 letters from the genus and - 3 - from the species, while others use 4 letters instead of 6 (2 letters from the genus and 2 - from - the species). - - - genus: the taxonomic genus name according to the APG system. In case of an - unknown genus, use “Unidentified.” - species: the species part of the Latin name; may be a morphospecies - name. - idlevel: the deepest taxonomic level for which full identification is known. The - IDLevel is limited to the values of: species, subspecies, genus, family, none, or multiple. “None” is used - when the family is not known. “Multiple” is used when the name may include a mixture of more - than one species. - family: the taxonomic family name (optional) - authority: author of the species (optional) - subspecies: subspecies identifier (optional) - subspeciesauthority: authority of subspecies (optional) - - - -
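The species file reduces to the row shape sketched below; only spcode, genus, species, and idlevel are required. The interface name and sample row are illustrative.

// Sketch of a species.txt row per the column descriptions above. Illustrative only.
interface SpeciesRowSketch {
  spcode: string;  // e.g. 'SHORMB'
  genus: string;   // 'Unidentified' when the genus is unknown
  species: string; // may be a morphospecies name
  idlevel: 'species' | 'subspecies' | 'genus' | 'family' | 'none' | 'multiple';
  family?: string;
  authority?: string;
  subspecies?: string;
  subspeciesauthority?: string;
}

const exampleSpecies: SpeciesRowSketch = { spcode: 'APORS1', genus: 'Aporosa', species: 'sp. 1', idlevel: 'genus' };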
-
-
-
-
- - )} + + {allowedSites?.map(site => ( + } sx={{ flexBasis: 'auto' }}> + + {site.siteName} + + + ))} + + + + + + + Recent Changes + + + {changelogHistory.map((changelog, index) => ( + + {changelog.id} + + } + > + + + + + + + {changelog.operation} ON {changelog.tableName} at {moment(changelog?.changeTimestamp).format('YYYY-MM-DD HH:mm:ss')} + + + + + + + Updating: + + + + + Old Row + + + {changelog.oldRowState && Object.keys(changelog.oldRowState).length > 0 ? ( + Object.entries(changelog.oldRowState).map(([key, value]) => ( + + + {key}: {value ?? 'NULL'} + + + )) + ) : ( + + No previous data available + + )} + + + + to + + + + + New Row + + + {changelog.newRowState && Object.keys(changelog.newRowState).length > 0 ? ( + Object.entries(changelog.newRowState).map(([key, value]) => ( + + + {key}: {value ?? 'NULL'} + + + )) + ) : ( + + No new data available + + )} + + + + + + + + + + + + ))} + + + + +
); } diff --git a/frontend/app/(hub)/error.tsx b/frontend/app/(hub)/error.tsx new file mode 100644 index 00000000..11c4c276 --- /dev/null +++ b/frontend/app/(hub)/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Core Hub Layout Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/alltaxonomies/error.tsx b/frontend/app/(hub)/fixeddatainput/alltaxonomies/error.tsx new file mode 100644 index 00000000..4ea21499 --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/alltaxonomies/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Species List Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/alltaxonomies/page.tsx b/frontend/app/(hub)/fixeddatainput/alltaxonomies/page.tsx index 3f5d260b..f035cbdb 100644 --- a/frontend/app/(hub)/fixeddatainput/alltaxonomies/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/alltaxonomies/page.tsx @@ -1,5 +1,5 @@ -import AllTaxonomiesViewDataGrid from "@/components/datagrids/applications/alltaxonomiesviewdatagrid"; +import IsolatedAllTaxonomiesViewDataGrid from '@/components/datagrids/applications/isolated/isolatedalltaxonomiesdatagrid'; export default function AllTaxonomiesPage() { - return ; + return ; } diff --git a/frontend/app/(hub)/fixeddatainput/attributes/error.tsx b/frontend/app/(hub)/fixeddatainput/attributes/error.tsx new file mode 100644 index 00000000..f5becb34 --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/attributes/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Stem Codes Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/attributes/page.tsx b/frontend/app/(hub)/fixeddatainput/attributes/page.tsx index 05b71016..1caea421 100644 --- a/frontend/app/(hub)/fixeddatainput/attributes/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/attributes/page.tsx @@ -1,5 +1,5 @@ -import AttributesDataGrid from "@/components/datagrids/applications/attributesdatagrid"; +import IsolatedAttributesDataGrid from '@/components/datagrids/applications/isolated/isolatedattributesdatagrid'; export default function AttributesPage() { - return ; -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/fixeddatainput/census/page.tsx b/frontend/app/(hub)/fixeddatainput/census/page.tsx deleted file mode 100644 index b602d83e..00000000 --- a/frontend/app/(hub)/fixeddatainput/census/page.tsx +++ /dev/null @@ -1,5 +0,0 @@ -import CensusDataGrid from "@/components/datagrids/applications/censusdatagrid"; - -export default function CensusPage() { - return ; -} \ No newline at end of file diff --git a/frontend/app/(hub)/fixeddatainput/personnel/error.tsx b/frontend/app/(hub)/fixeddatainput/personnel/error.tsx new file mode 100644 index 00000000..415d1b09 --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/personnel/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, 
Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Personnel Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/personnel/page.tsx b/frontend/app/(hub)/fixeddatainput/personnel/page.tsx index fe23a45f..aea5f87d 100644 --- a/frontend/app/(hub)/fixeddatainput/personnel/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/personnel/page.tsx @@ -1,5 +1,5 @@ -import PersonnelDataGrid from "@/components/datagrids/applications/personneldatagrid"; +import IsolatedPersonnelDataGrid from '@/components/datagrids/applications/isolated/isolatedpersonneldatagrid'; export default function PersonnelPage() { - return ; -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/fixeddatainput/quadratpersonnel/error.tsx b/frontend/app/(hub)/fixeddatainput/quadratpersonnel/error.tsx new file mode 100644 index 00000000..26a71a70 --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/quadratpersonnel/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - QuadratPersonnel Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/quadratpersonnel/page.tsx b/frontend/app/(hub)/fixeddatainput/quadratpersonnel/page.tsx index 15bbbaf2..b60bb63c 100644 --- a/frontend/app/(hub)/fixeddatainput/quadratpersonnel/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/quadratpersonnel/page.tsx @@ -1,5 +1,5 @@ -import QuadratPersonnelDataGrid from "@/components/datagrids/applications/quadratpersonneldatagrid"; +import IsolatedQuadratPersonnelDataGrid from '@/components/datagrids/applications/isolated/isolatedquadratpersonneldatagrid'; export default function QuadratPersonnelPage() { - return ; -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/fixeddatainput/quadrats/error.tsx b/frontend/app/(hub)/fixeddatainput/quadrats/error.tsx new file mode 100644 index 00000000..e78eac6a --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/quadrats/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Quadrats Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/quadrats/page.tsx b/frontend/app/(hub)/fixeddatainput/quadrats/page.tsx index 1e5be33f..f0aaa5c8 100644 --- a/frontend/app/(hub)/fixeddatainput/quadrats/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/quadrats/page.tsx @@ -1,5 +1,5 @@ -import QuadratsDataGrid from "@/components/datagrids/applications/quadratsdatagrid"; +import IsolatedQuadratsDataGrid from '@/components/datagrids/applications/isolated/isolatedquadratsdatagrid'; export default function QuadratsPage() { - return ; -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/fixeddatainput/species/page.tsx b/frontend/app/(hub)/fixeddatainput/species/page.tsx deleted file mode 100644 index 12bc9f8f..00000000 --- a/frontend/app/(hub)/fixeddatainput/species/page.tsx +++ /dev/null @@ -1,5 +0,0 @@ -import SpeciesDataGrid from "@/components/datagrids/applications/speciesdatagrid"; - -export default function SpeciesPage() 
{ - return ; -} \ No newline at end of file diff --git a/frontend/app/(hub)/fixeddatainput/stemdimensions/page.tsx b/frontend/app/(hub)/fixeddatainput/stemdimensions/page.tsx deleted file mode 100644 index 86e457ff..00000000 --- a/frontend/app/(hub)/fixeddatainput/stemdimensions/page.tsx +++ /dev/null @@ -1,7 +0,0 @@ -"use client"; - -import StemDimensionsViewDataGrid from "@/components/datagrids/applications/stemdimensionsviewdatagrid"; - -export default function StemDimensionsPage() { - return ; -} \ No newline at end of file diff --git a/frontend/app/(hub)/fixeddatainput/stemtaxonomies/error.tsx b/frontend/app/(hub)/fixeddatainput/stemtaxonomies/error.tsx new file mode 100644 index 00000000..eaaea954 --- /dev/null +++ b/frontend/app/(hub)/fixeddatainput/stemtaxonomies/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Plot-Species List + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/fixeddatainput/stemtaxonomies/page.tsx b/frontend/app/(hub)/fixeddatainput/stemtaxonomies/page.tsx index b8fc7b70..74cd3733 100644 --- a/frontend/app/(hub)/fixeddatainput/stemtaxonomies/page.tsx +++ b/frontend/app/(hub)/fixeddatainput/stemtaxonomies/page.tsx @@ -1,7 +1,7 @@ -"use client"; +'use client'; -import StemTaxonomiesViewDataGrid from "@/components/datagrids/applications/stemtaxonomiesviewdatagrid"; +import IsolatedStemTaxonomiesViewDataGrid from '@/components/datagrids/applications/isolated/isolatedstemtaxonomiesviewdatagrid'; export default function StemTaxonomiesPage() { - return ; -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/fixeddatainput/subquadrats/page.tsx b/frontend/app/(hub)/fixeddatainput/subquadrats/page.tsx deleted file mode 100644 index e7b50e0b..00000000 --- a/frontend/app/(hub)/fixeddatainput/subquadrats/page.tsx +++ /dev/null @@ -1,5 +0,0 @@ -import SubquadratsDataGrid from "@/components/client/sqdatagrid"; - -export default function SubquadratsPage() { - return ; -} \ No newline at end of file diff --git a/frontend/app/(hub)/layout.tsx b/frontend/app/(hub)/layout.tsx index 347048ad..5e917153 100644 --- a/frontend/app/(hub)/layout.tsx +++ b/frontend/app/(hub)/layout.tsx @@ -1,62 +1,42 @@ -"use client"; -import React, { useEffect, useCallback, useRef, useState } from "react"; -import { subtitle, title } from "@/config/primitives"; -import { useSession } from "next-auth/react"; -import { redirect, usePathname } from "next/navigation"; -import dynamic from "next/dynamic"; -import { Box } from "@mui/joy"; -import Divider from "@mui/joy/Divider"; -import { useLoading } from "@/app/contexts/loadingprovider"; -import { getAllSchemas } from "@/components/processors/processorhelperfunctions"; -import { - useSiteContext, - usePlotContext, - useOrgCensusContext, - useQuadratContext, -} from "@/app/contexts/userselectionprovider"; -import { - useOrgCensusListContext, - useOrgCensusListDispatch, - usePlotListContext, - usePlotListDispatch, - useQuadratListContext, - useQuadratListDispatch, - useSiteListDispatch, - useSubquadratListContext, - useSubquadratListDispatch, -} from "@/app/contexts/listselectionprovider"; -import {createAndUpdateCensusList} from "@/config/sqlrdsdefinitions/orgcensusrds"; -import {siteConfig} from "@/config/macros/siteconfigs"; +'use client'; +import React, { useCallback, useEffect, useRef, useState } from 
'react'; +import { title } from '@/config/primitives'; +import { useSession } from 'next-auth/react'; +import { redirect, usePathname } from 'next/navigation'; +import dynamic from 'next/dynamic'; +import { Box, IconButton, Stack, Typography } from '@mui/joy'; +import Divider from '@mui/joy/Divider'; +import { useLoading } from '@/app/contexts/loadingprovider'; +import { getAllSchemas } from '@/components/processors/processorhelperfunctions'; +import { useOrgCensusContext, usePlotContext, useSiteContext } from '@/app/contexts/userselectionprovider'; +import { useOrgCensusListDispatch, usePlotListDispatch, useQuadratListDispatch, useSiteListDispatch } from '@/app/contexts/listselectionprovider'; +import { getEndpointHeaderName, siteConfig } from '@/config/macros/siteconfigs'; +import GithubFeedbackModal from '@/components/client/githubfeedbackmodal'; +import HelpOutlineOutlinedIcon from '@mui/icons-material/HelpOutlineOutlined'; +import { useLockAnimation } from '../contexts/lockanimationcontext'; +import { createAndUpdateCensusList } from '@/config/sqlrdsdefinitions/timekeeping'; +import { AcaciaVersionTypography } from '@/styles/versions/acaciaversion'; const Sidebar = dynamic(() => import('@/components/sidebar'), { ssr: false }); const Header = dynamic(() => import('@/components/header'), { ssr: false }); function renderSwitch(endpoint: string) { - switch (endpoint) { - case '/dashboard': - return

Dashboard - ForestGEO Application User Guide

; - case '/measurementshub/summary': - return

Measurements Summary

; - case '/measurementshub/validationhistory': - return

Validation History

; - case '/fixeddatainput/attributes': - return

Supporting Data Hub - Attributes

; - case '/fixeddatainput/census': - return

Supporting Data - Census

; - case '/fixeddatainput/personnel': - return

Supporting Data - Personnel

; - case '/fixeddatainput/quadrats': - return

Supporting Data - Quadrats

; - case '/fixeddatainput/subquadrats': - return

Supporting Data - Subquadrats

; - case '/fixeddatainput/quadratpersonnel': - return

Supporting Data - QuadratPersonnel

; - case '/fixeddatainput/alltaxonomies': - return

Supporting Data - All Taxonomies

; - default: - return <>; - } + const commonStyle = { + display: 'flex', + flex: 1, + alignItems: 'center', + justifyContent: 'center', + textAlign: 'center', + minHeight: '50px' + }; + + return ( + +

+ {getEndpointHeaderName(endpoint)} +

+
+ ); } export default function HubLayout({ children }: { children: React.ReactNode }) { @@ -65,202 +45,245 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { const censusListDispatch = useOrgCensusListDispatch(); const quadratListDispatch = useQuadratListDispatch(); const siteListDispatch = useSiteListDispatch(); - const subquadratListDispatch = useSubquadratListDispatch(); const plotListDispatch = usePlotListDispatch(); - const censusListContext = useOrgCensusListContext(); - const quadratListContext = useQuadratListContext(); - const subquadratListContext = useSubquadratListContext(); - const plotListContext = usePlotListContext(); - const currentSite = useSiteContext(); const currentPlot = usePlotContext(); const currentCensus = useOrgCensusContext(); - const currentQuadrat = useQuadratContext(); const { data: session } = useSession(); const previousSiteRef = useRef(undefined); + const previousPlotRef = useRef(undefined); + const previousCensusRef = useRef(undefined); const [siteListLoaded, setSiteListLoaded] = useState(false); const [plotListLoaded, setPlotListLoaded] = useState(false); const [censusListLoaded, setCensusListLoaded] = useState(false); const [quadratListLoaded, setQuadratListLoaded] = useState(false); - const [subquadratListLoaded, setSubquadratListLoaded] = useState(false); const [manualReset, setManualReset] = useState(false); const [isSidebarVisible, setSidebarVisible] = useState(!!session); + const [isFeedbackModalOpen, setIsFeedbackModalOpen] = useState(false); const pathname = usePathname(); + const coreDataLoaded = siteListLoaded && plotListLoaded && censusListLoaded && quadratListLoaded; + const { isPulsing } = useLockAnimation(); - const coreDataLoaded = siteListLoaded && plotListLoaded && censusListLoaded && (quadratListLoaded || subquadratListLoaded); + const lastExecutedRef = useRef(null); + // Refs for debouncing + const plotLastExecutedRef = useRef(null); + const censusLastExecutedRef = useRef(null); + const quadratLastExecutedRef = useRef(null); - const loadCensusData = useCallback(async () => { - if (!currentPlot) return { success: false, message: 'Plot must be selected to load census data' }; - if (censusListContext !== undefined && censusListContext.length > 0) return { success: true }; - - setLoading(true, 'Loading raw census data'); - const response = await fetch(`/api/fetchall/census/${currentPlot.plotID}?schema=${currentSite?.schemaName || ''}`); - const censusRDSLoad = await response.json(); - setLoading(false); - - setLoading(true, 'Converting raw census data...'); - const censusList = await createAndUpdateCensusList(censusRDSLoad); - if (censusListDispatch) { - censusListDispatch({ censusList }); - } - setLoading(false); - setCensusListLoaded(true); - return { success: true }; - }, [censusListContext, censusListDispatch, currentPlot, currentSite, setLoading]); - - const loadPlotsData = useCallback(async () => { - if (!currentSite) return { success: false, message: 'Site must be selected to load plot data' }; - if (plotListContext !== undefined && plotListContext.length > 0) return { success: true }; - - setLoading(true, "Loading plot list information..."); - const plotsResponse = await fetch(`/api/fetchall/plots?schema=${currentSite?.schemaName || ''}`); - const plotsData = await plotsResponse.json(); - if (!plotsData) return { success: false, message: 'Failed to load plots data' }; - setLoading(false); - - setLoading(true, "Dispatching plot list information..."); - if (plotListDispatch) { - await 
plotListDispatch({ plotList: plotsData }); - } else return { success: false, message: 'Failed to dispatch plots data' }; - setLoading(false); - setPlotListLoaded(true); - return { success: true }; - }, [plotListContext, plotListDispatch, currentSite, setLoading]); - - const loadQuadratsData = useCallback(async () => { - if (!currentPlot || !currentCensus) return { - success: false, - message: 'Plot and Census must be selected to load quadrat data' - }; - if (quadratListContext !== undefined && quadratListContext.length > 0) return { success: true }; - - setLoading(true, "Loading quadrat list information..."); - const quadratsResponse = await fetch(`/api/fetchall/quadrats/${currentPlot.plotID}/${currentCensus.plotCensusNumber}?schema=${currentSite?.schemaName || ''}`); - const quadratsData = await quadratsResponse.json(); - if (!quadratsData) return { success: false, message: 'Failed to load quadrats data' }; - setLoading(false); - - setLoading(true, "Dispatching quadrat list information..."); - if (quadratListDispatch) { - await quadratListDispatch({ quadratList: quadratsData }); - } else return { success: false, message: 'Failed to dispatch quadrats data' }; - setLoading(false); - setQuadratListLoaded(true); - return { success: true }; - }, [quadratListContext, quadratListDispatch, currentPlot, currentCensus, currentSite, setLoading]); - - const loadSubquadratsData = useCallback(async () => { - if (!currentPlot || !currentCensus || !currentQuadrat) return { - success: false, - message: 'Plot, Census, and Quadrat must be selected to load subquadrat data' - }; - if (subquadratListContext !== undefined && subquadratListContext.length > 0) return { success: true }; - - setLoading(true, "Loading subquadrat list information..."); - const subquadratResponse = await fetch(`/api/fetchall/subquadrats/${currentPlot.plotID}/${currentCensus.plotCensusNumber}/${currentQuadrat.quadratID}?schema=${currentSite?.schemaName || ''}`); - const subquadratData = await subquadratResponse.json(); - if (!subquadratData) return { success: false, message: 'Failed to load subquadrats data' }; - setLoading(false); - - setLoading(true, "Dispatching subquadrat list information..."); - if (subquadratListDispatch) { - await subquadratListDispatch({ subquadratList: subquadratData }); - } else return { success: false, message: 'Failed to dispatch subquadrat list' }; - setLoading(false); - setSubquadratListLoaded(true); - return { success: true }; - }, [subquadratListContext, subquadratListDispatch, currentPlot, currentCensus, currentQuadrat, currentSite, setLoading]); + // Debounce delay + const debounceDelay = 300; const fetchSiteList = useCallback(async () => { - setLoading(true, 'Loading Sites...'); + const now = Date.now(); + if (lastExecutedRef.current && now - lastExecutedRef.current < debounceDelay + 200) { + return; + } + + // Update last executed timestamp + lastExecutedRef.current = now; + try { - if (session && !siteListLoaded) { + setLoading(true, 'Loading Sites...'); + if (session && !siteListLoaded && !currentSite) { const sites = session?.user?.allsites ?? []; if (sites.length === 0) { - throw new Error("Session sites undefined"); + throw new Error('Session sites undefined'); } else { - siteListDispatch ? await siteListDispatch({siteList: sites}) : undefined; + if (siteListDispatch) await siteListDispatch({ siteList: sites }); } } } catch (e: any) { const allsites = await getAllSchemas(); - siteListDispatch ? 
await siteListDispatch({siteList: allsites}) : undefined; + if (siteListDispatch) await siteListDispatch({ siteList: allsites }); + } finally { + setLoading(false); } - setLoading(false); }, [session, siteListLoaded, siteListDispatch, setLoading]); - useEffect(() => { - if (session && !siteListLoaded) { - fetchSiteList() - .then(() => setSiteListLoaded(true)) - .catch(console.error); + const loadPlotData = useCallback(async () => { + const now = Date.now(); + if (plotLastExecutedRef.current && now - plotLastExecutedRef.current < debounceDelay) { + return; } - }, [fetchSiteList, session, siteListLoaded]); + plotLastExecutedRef.current = now; - useEffect(() => { - if (!plotListLoaded && plotListDispatch) { - plotListDispatch({ plotList: undefined }).catch(console.error); + try { + setLoading(true, 'Loading plot data...'); + if (currentSite && !plotListLoaded) { + const response = await fetch(`/api/fetchall/plots?schema=${currentSite?.schemaName || ''}`); + const plotsData = await response.json(); + if (!plotsData) throw new Error('Failed to load plots data'); + if (plotListDispatch) await plotListDispatch({ plotList: plotsData }); + setPlotListLoaded(true); + } + } catch (error) { + console.error('Error loading plot data:', error); + } finally { + setLoading(false); } - if (!censusListLoaded && censusListDispatch) { - censusListDispatch({ censusList: undefined }).catch(console.error); + }, [currentSite, plotListLoaded, plotListDispatch, setLoading]); + + // Function to load census data with debounce + const loadCensusData = useCallback(async () => { + const now = Date.now(); + if (censusLastExecutedRef.current && now - censusLastExecutedRef.current < debounceDelay) { + return; } - if (!quadratListLoaded && quadratListDispatch) { - quadratListDispatch({ quadratList: undefined }).catch(console.error); + censusLastExecutedRef.current = now; + + try { + setLoading(true, 'Loading census data...'); + if (currentSite && currentPlot && !censusListLoaded) { + const response = await fetch(`/api/fetchall/census/${currentPlot.plotID}?schema=${currentSite.schemaName}`); + const censusRDSLoad = await response.json(); + if (!censusRDSLoad) throw new Error('Failed to load census data'); + const censusList = await createAndUpdateCensusList(censusRDSLoad); + if (censusListDispatch) await censusListDispatch({ censusList }); + setCensusListLoaded(true); + } + } catch (error) { + console.error('Error loading census data:', error); + } finally { + setLoading(false); } - if (!subquadratListLoaded && subquadratListDispatch) { - subquadratListDispatch({ subquadratList: undefined }).catch(console.error); + }, [currentSite, currentPlot, censusListLoaded, censusListDispatch, setLoading]); + + // Function to load quadrat data with debounce + const loadQuadratData = useCallback(async () => { + const now = Date.now(); + if (quadratLastExecutedRef.current && now - quadratLastExecutedRef.current < debounceDelay) { + return; } - }, [plotListLoaded, censusListLoaded, quadratListLoaded, subquadratListLoaded]); + quadratLastExecutedRef.current = now; + try { + setLoading(true, 'Loading quadrat data...'); + if (currentSite && currentPlot && currentCensus && !quadratListLoaded) { + const response = await fetch(`/api/fetchall/quadrats/${currentPlot.plotID}/${currentCensus.plotCensusNumber}?schema=${currentSite.schemaName}`); + const quadratsData = await response.json(); + if (!quadratsData) throw new Error('Failed to load quadrats data'); + if (quadratListDispatch) await quadratListDispatch({ quadratList: quadratsData }); + 
setQuadratListLoaded(true); + } + } catch (error) { + console.error('Error loading quadrat data:', error); + } finally { + setLoading(false); + } + }, [currentSite, currentPlot, currentCensus, quadratListLoaded, quadratListDispatch, setLoading]); + + // Fetch site list if session exists and site list has not been loaded useEffect(() => { - const hasSiteChanged = previousSiteRef.current !== currentSite?.siteName; - if (siteListLoaded && currentSite && hasSiteChanged) { - setPlotListLoaded(false); - setCensusListLoaded(false); - setQuadratListLoaded(false); - setSubquadratListLoaded(false); - previousSiteRef.current = currentSite.siteName; + // Ensure session is ready before attempting to fetch site list + if (session && !siteListLoaded) { + fetchSiteList().catch(console.error); } - if (siteListLoaded && currentSite && !plotListLoaded) { - loadPlotsData().catch(console.error); + }, [session, siteListLoaded, fetchSiteList]); + + // Fetch plot data when currentSite is defined and plotList has not been loaded + useEffect(() => { + if (currentSite && !plotListLoaded) { + loadPlotData().catch(console.error); } - if (siteListLoaded && currentSite && plotListLoaded && !censusListLoaded) { + }, [currentSite, plotListLoaded, loadPlotData]); + + // Fetch census data when currentSite, currentPlot are defined and censusList has not been loaded + useEffect(() => { + if (currentSite && currentPlot && !censusListLoaded) { loadCensusData().catch(console.error); } - if (siteListLoaded && currentSite && plotListLoaded && censusListLoaded && !quadratListLoaded) { - loadQuadratsData().catch(console.error); + }, [currentSite, currentPlot, censusListLoaded, loadCensusData]); + + // Fetch quadrat data when currentSite, currentPlot, currentCensus are defined and quadratList has not been loaded + useEffect(() => { + if (currentSite && currentPlot && currentCensus && !quadratListLoaded) { + loadQuadratData().catch(console.error); } - // if (siteListLoaded && currentSite && plotListLoaded && censusListLoaded && quadratListLoaded && !subquadratListLoaded) { - // loadSubquadratsData().catch(console.error); - // } - }, [siteListLoaded, currentSite, plotListLoaded, censusListLoaded, quadratListLoaded, subquadratListLoaded, loadCensusData, loadPlotsData, loadQuadratsData, loadSubquadratsData]); + }, [currentSite, currentPlot, currentCensus, quadratListLoaded, loadQuadratData]); + // Handle manual reset logic useEffect(() => { if (manualReset) { - setLoading(true, "Manual refresh beginning..."); + setLoading(true, 'Manual refresh beginning...'); + + // Reset all loading states + setSiteListLoaded(false); setPlotListLoaded(false); setCensusListLoaded(false); setQuadratListLoaded(false); - setSubquadratListLoaded(false); - setSiteListLoaded(false); + setManualReset(false); - setLoading(false); } }, [manualReset]); + // Clear lists and reload data when site, plot, or census changes useEffect(() => { - // if contexts are reset due to website refresh, system needs to redirect user back to dashboard - if ((currentSite === undefined && currentPlot === undefined && currentQuadrat === undefined) && pathname !== '/dashboard') redirect('/dashboard'); - }, [pathname]); + const hasSiteChanged = previousSiteRef.current !== currentSite?.siteName; + const hasPlotChanged = previousPlotRef.current !== currentPlot?.plotID; + const hasCensusChanged = previousCensusRef.current !== currentCensus?.dateRanges[0]?.censusID; + const clearLists = async () => { + const promises = []; + + if (hasSiteChanged) { + // Clear plot, census, and quadrat lists 
when a new site is selected + setPlotListLoaded(false); + setCensusListLoaded(false); + setQuadratListLoaded(false); + if (plotListDispatch) promises.push(plotListDispatch({ plotList: undefined })); + if (censusListDispatch) promises.push(censusListDispatch({ censusList: undefined })); + if (quadratListDispatch) promises.push(quadratListDispatch({ quadratList: undefined })); + previousSiteRef.current = currentSite?.siteName; + } + + if (hasPlotChanged) { + // Clear census and quadrat lists when a new plot is selected + setCensusListLoaded(false); + setQuadratListLoaded(false); + if (censusListDispatch) promises.push(censusListDispatch({ censusList: undefined })); + if (quadratListDispatch) promises.push(quadratListDispatch({ quadratList: undefined })); + previousPlotRef.current = currentPlot?.plotID; + } + + if (hasCensusChanged) { + // Clear quadrat list when a new census is selected + setQuadratListLoaded(false); + if (quadratListDispatch) promises.push(quadratListDispatch({ quadratList: undefined })); + previousCensusRef.current = currentCensus?.dateRanges[0]?.censusID; + } + + await Promise.all(promises); + + // Add a short delay to ensure UI reflects clearing lists before loading new data + setTimeout(() => { + loadPlotData() + .then(() => loadCensusData()) + .then(() => loadQuadratData()) + .catch(console.error); + }, 300); // 300ms delay for UI reset + }; + + if (hasSiteChanged || hasPlotChanged || hasCensusChanged) { + clearLists().catch(console.error); + } + }, [currentSite, currentPlot, currentCensus, plotListDispatch, censusListDispatch, quadratListDispatch, loadPlotData, loadCensusData, loadQuadratData]); + + // Handle redirection if contexts are reset (i.e., no site, plot, or census) and user is not on the dashboard + useEffect(() => { + if (currentSite === undefined && currentPlot === undefined && currentCensus === undefined && pathname !== '/dashboard') { + redirect('/dashboard'); + } + }, [pathname, currentSite, currentPlot, currentCensus]); + + // Handle sidebar visibility based on session presence useEffect(() => { if (session) { const timer = setTimeout(() => { setSidebarVisible(true); - }, 300); // Debounce the sidebar visibility with a delay + }, 300); // Debounce sidebar visibility with a delay return () => clearTimeout(timer); } }, [session]); @@ -268,15 +291,16 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { return ( <> - +
- - {renderSwitch(usePathname())} + + {renderSwitch(pathname)} - + - {coreDataLoaded && ( - <> - {children} - - )} + paddingLeft: 2 + }} + > + {session?.user.name && session.user.email && session.user.userStatus && <>{children}} - - + - -

{siteConfig.name} 

-
- - -

{siteConfig.description}

-
+ justifyContent: 'center', + mt: 2, + position: 'relative' + }} + > + } + className={isPulsing ? 'animate-fade-blur-in' : ''} + > + + {/*{siteConfig.name}*/} + {siteConfig.name} + + {/**/} + {/* */} + {/* */} + {/* {siteConfig.version}*/} + {/* */} + {/* */} + {/**/} + + setIsFeedbackModalOpen(true)} + className={isPulsing ? 'animate-pulse-no-opacity' : ''} + sx={{ + position: 'fixed', + bottom: 20, + right: 20, + zIndex: 2000, + bgcolor: 'primary.main', + color: 'white', + '&:hover': { + bgcolor: 'primary.dark' + } + }} + > + +
+ setIsFeedbackModalOpen(false)} /> ); } diff --git a/frontend/app/(hub)/measurementshub/postvalidation/error.tsx b/frontend/app/(hub)/measurementshub/postvalidation/error.tsx new file mode 100644 index 00000000..eaaea954 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/postvalidation/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Plot-Species List + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/measurementshub/postvalidation/page.tsx b/frontend/app/(hub)/measurementshub/postvalidation/page.tsx new file mode 100644 index 00000000..86ecfb66 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/postvalidation/page.tsx @@ -0,0 +1,93 @@ +'use client'; + +import { useOrgCensusContext, usePlotContext, useSiteContext } from '@/app/contexts/userselectionprovider'; +import { useEffect, useState } from 'react'; +import { Box, LinearProgress } from '@mui/joy'; + +interface PostValidations { + queryID: number; + queryName: string; + queryDescription: string; +} + +interface PostValidationResults { + count: number; + data: any; +} + +export default function PostValidationPage() { + const currentSite = useSiteContext(); + const currentPlot = usePlotContext(); + const currentCensus = useOrgCensusContext(); + const [postValidations, setPostValidations] = useState([]); + const [validationResults, setValidationResults] = useState>({}); + const [loadingQueries, setLoadingQueries] = useState(false); + + // Fetch post-validation queries on first render + useEffect(() => { + async function loadQueries() { + try { + setLoadingQueries(true); + const response = await fetch(`/api/postvalidation?schema=${currentSite?.schemaName}`, { method: 'GET' }); + const data = await response.json(); + setPostValidations(data); + } catch (error) { + console.error('Error loading queries:', error); + } finally { + setLoadingQueries(false); + } + } + + if (currentSite?.schemaName) { + loadQueries(); + } + }, [currentSite?.schemaName]); + + // Fetch validation results for each query + useEffect(() => { + async function fetchValidationResults(postValidation: PostValidations) { + try { + const response = await fetch( + `/api/postvalidationbyquery/${currentSite?.schemaName}/${currentPlot?.plotID}/${currentCensus?.dateRanges[0].censusID}/${postValidation.queryID}`, + { method: 'GET' } + ); + const data = await response.json(); + setValidationResults(prev => ({ + ...prev, + [postValidation.queryID]: data + })); + } catch (error) { + console.error(`Error fetching validation results for query ${postValidation.queryID}:`, error); + setValidationResults(prev => ({ + ...prev, + [postValidation.queryID]: null // Mark as failed if there was an error + })); + } + } + + if (postValidations.length > 0 && currentPlot?.plotID && currentCensus?.dateRanges) { + postValidations.forEach(postValidation => { + fetchValidationResults(postValidation).then(r => console.log(r)); + }); + } + }, [postValidations, currentPlot?.plotID, currentCensus?.dateRanges, currentSite?.schemaName]); + + return ( + + {loadingQueries ? ( + + ) : postValidations.length > 0 ? ( + + {postValidations.map(postValidation => ( + +
{postValidation.queryName}
+ {validationResults[postValidation.queryID] ? : } +
+ ))} +
+ ) : ( +
No post-validation queries available.
+ )} +
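// Hedged sketch (illustration only, not part of this diff): the effect above fires one fetch per
// post-validation query via forEach and discards the returned promises. An equivalent variant could
// await all of them and commit the results in one pass. It reuses the PostValidations and
// PostValidationResults interfaces declared earlier in this file; the helper name
// fetchAllPostValidationResults is hypothetical.
async function fetchAllPostValidationResults(
  schemaName: string,
  plotID: number,
  censusID: number,
  queries: PostValidations[]
): Promise<Record<number, PostValidationResults | null>> {
  const results: Record<number, PostValidationResults | null> = {};
  await Promise.all(
    queries.map(async q => {
      try {
        const res = await fetch(`/api/postvalidationbyquery/${schemaName}/${plotID}/${censusID}/${q.queryID}`, { method: 'GET' });
        results[q.queryID] = (await res.json()) as PostValidationResults;
      } catch (error) {
        console.error(`Error fetching validation results for query ${q.queryID}:`, error);
        results[q.queryID] = null; // mark failed queries instead of aborting the whole batch
      }
    })
  );
  return results;
}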
+ ); +} diff --git a/frontend/app/(hub)/measurementshub/summary/error.tsx b/frontend/app/(hub)/measurementshub/summary/error.tsx new file mode 100644 index 00000000..c45668bf --- /dev/null +++ b/frontend/app/(hub)/measurementshub/summary/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - View Data Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/measurementshub/summary/page.tsx b/frontend/app/(hub)/measurementshub/summary/page.tsx index 8ceb8938..da8b5f8b 100644 --- a/frontend/app/(hub)/measurementshub/summary/page.tsx +++ b/frontend/app/(hub)/measurementshub/summary/page.tsx @@ -1,392 +1,5 @@ -'use client'; -import React, { useEffect, useState } from "react"; -import { GridRowModes, GridRowModesModel, GridRowsProp } from "@mui/x-data-grid"; -import { Alert, AlertProps, LinearProgress, Tooltip, TooltipProps, styled, tooltipClasses } from "@mui/material"; -import { gridColumnsArrayMSVRDS, initialMeasurementsSummaryViewRDSRow } from '@/config/sqlrdsdefinitions/views/measurementssummaryviewrds'; -import { - Box, - ListItemContent, - ListItem, - List, - Modal, - ModalDialog, - Typography, - Button, - DialogTitle, - DialogContent, - DialogActions, - Snackbar, - Stack, -} from "@mui/joy"; -import Select, { SelectOption } from "@mui/joy/Select"; -import { useSession } from "next-auth/react"; -import { - useOrgCensusContext, - usePlotContext, - useQuadratDispatch, - useSiteContext -} from "@/app/contexts/userselectionprovider"; -import { randomId } from "@mui/x-data-grid-generator"; -import UploadParentModal from "@/components/uploadsystemhelpers/uploadparentmodal"; -import { useQuadratListContext } from "@/app/contexts/listselectionprovider"; -import { Quadrat } from "@/config/sqlrdsdefinitions/tables/quadratrds"; -import Option from '@mui/joy/Option'; -import MeasurementSummaryGrid from "@/components/datagrids/msvdatagrid"; -import { useDataValidityContext } from "@/app/contexts/datavalidityprovider"; -import { UnifiedValidityFlags } from "@/config/macros"; - -const LargeTooltip = styled(({ className, ...props }: TooltipProps) => ( - -))(({ theme }) => ({ - [`& .${tooltipClasses.tooltip}`]: { - fontSize: 16, - maxWidth: 600, // Increase maxWidth to give more space for text - }, -})); - -interface ChecklistProgress { - progress: number; - message: string; - error?: string; -} +import MeasurementsSummaryViewDataGrid from '@/components/datagrids/applications/measurementssummaryviewdatagrid'; export default function SummaryPage() { - const { data: session } = useSession(); - const [quadrat, setQuadrat] = useState(); - const [quadratList, setQuadratList] = useState([]); - const currentPlot = usePlotContext(); - const currentCensus = useOrgCensusContext(); - const currentSite = useSiteContext(); - const quadratListContext = useQuadratListContext(); - const quadratDispatch = useQuadratDispatch(); - const { validity, recheckValidityIfNeeded } = useDataValidityContext(); - const [progressDialogOpen, setProgressDialogOpen] = useState(false); - const [isUploadAllowed, setIsUploadAllowed] = useState(false); - const [isUploadModalOpen, setIsUploadModalOpen] = useState(false); - const [triggerGlobalError, setTriggerGlobalError] = useState(false); - const [globalError, setGlobalError] = useState(null); - - useEffect(() => { - if (currentPlot) { - // ensure that selectable 
list is restricted by selected plot - setQuadratList(quadratListContext?.filter(quadrat => quadrat?.plotID === currentPlot.plotID) || undefined); - } - }, [currentPlot, quadratListContext]); - - const [rows, setRows] = React.useState([initialMeasurementsSummaryViewRDSRow] as GridRowsProp); - const [rowCount, setRowCount] = useState(0); // total number of rows - const [rowModesModel, setRowModesModel] = React.useState({}); - const [snackbar, setSnackbar] = React.useState | null>(null); - const [refresh, setRefresh] = useState(false); - const [paginationModel, setPaginationModel] = useState({ - page: 0, - pageSize: 10, - }); - const [isNewRowAdded, setIsNewRowAdded] = useState(false); - const [shouldAddRowAfterFetch, setShouldAddRowAfterFetch] = useState(false); - const [useSubquadrats, setUseSubquadrats] = useState(currentPlot?.usesSubquadrats ?? false); - - useEffect(() => { - const verifyPreconditions = async () => { - setIsUploadAllowed(!Object.entries(validity).filter(item => item[0] !== 'subquadrats').map(item => item[1]).includes(false)); - }; - - if (progressDialogOpen) { - verifyPreconditions().catch(console.error); - } - }, [progressDialogOpen, validity]); - - const addNewRowToGrid = () => { - const id = randomId(); - // Define new row structure based on MeasurementsSummaryRDS type - const newRow = { - ...initialMeasurementsSummaryViewRDSRow, - id: id, - coreMeasurementID: 0, - plotID: currentPlot?.plotID, - plotName: currentPlot?.plotName, - censusID: currentCensus?.dateRanges[0].censusID, - censusStartDate: currentCensus?.dateRanges[0]?.startDate, - censusEndDate: currentCensus?.dateRanges[0]?.endDate, - isNew: true, - }; - setRows(oldRows => [...oldRows, newRow]); - setRowModesModel(oldModel => ({ ...oldModel, [id]: { mode: GridRowModes.Edit } })); - }; - - const handleCloseGlobalError = () => { - setGlobalError(null); - setTriggerGlobalError(false); - }; - - const handleCloseProgressDialog = () => { - setProgressDialogOpen(false); - if (isUploadAllowed) { - setTimeout(() => { - setIsUploadModalOpen(true); - }, 300); - } else { - setGlobalError('Missing prerequisites! Please upload supporting data before submitting measurements!'); - setTriggerGlobalError(true); - } - }; - - const checklistItems: (keyof UnifiedValidityFlags)[] = ['attributes', 'species', 'personnel', 'quadrats']; - - const ProgressDialog = () => ( - { - }} - sx={{ display: 'flex', alignItems: 'center', justifyContent: 'center' }} - > - - Pre-Validation Systems Check - - Measurements Upload Warning: - - In order to upload measurements, all of the following tables must be populated! - - - {checklistItems.map((item) => { - const isValid = validity[item]; - const progressData = isValid - ? { - progress: 100, - message: `Passed: ${item.charAt(0).toUpperCase() + item.substring(1)}`, - error: undefined - } - : { - progress: 0, - message: `Failure: ${item.charAt(0).toUpperCase() + item.substring(1)}`, - error: `${item.charAt(0).toUpperCase() + item.substring(1)} is invalid or missing.` - }; - const tooltipMessage = progressData.error - ? 
`${progressData.error}` - : progressData.message; - - return ( - - - {progressData.message} - - - - - - - - ); - })} - - - - - - - - - ); - - const renderQuadratValue = (option: SelectOption | null) => { - if (!option) { - return Select a Quadrat; // or some placeholder JSX - } - - // Find the corresponding Quadrat object - const selectedValue = option.value; // assuming option has a 'value' property - const selectedQuadrat = quadratListContext?.find(c => c?.quadratName === selectedValue); - - // Return JSX - return selectedQuadrat ? {`Quadrat: ${selectedQuadrat?.quadratName}`} : - No Quadrat; - }; - - const handleQuadratSelection = async (selectedQuadrat: Quadrat | undefined) => { - setQuadrat(selectedQuadrat); - if (quadratDispatch) { - await quadratDispatch({ quadrat: selectedQuadrat }); - } - }; - - const handleConfirmQuadrat = async () => { - await handleQuadratSelection(quadrat); - }; - - const QuadratSelectionMenu = () => ( - - Select Quadrat: - - - {!validity['quadrats'] && ( - - No quadrats exist to be selected. - - )} - - ); - - useEffect(() => { - const updateUseSubquadrats = async () => { - const updatedPlot = { - ...currentPlot, - usesSubquadrats: useSubquadrats, - }; - const response = await fetch(`/api/fixeddata/plots?schema=${currentSite?.schemaName ?? ''}`, { - method: 'PATCH', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(updatedPlot) - }); - if (!response.ok) setGlobalError('Toggling subquadrats usage failed!'); - }; - - if (currentPlot?.usesSubquadrats !== useSubquadrats) { - updateUseSubquadrats().catch(console.error); - } - }, [currentPlot, useSubquadrats]); - - return ( - <> - {globalError && ( - - {globalError} - - )} - - - - - - - {currentPlot?.usesSubquadrats ? ( - - Note: This plot has been set to accept - subquadrats.
- Please ensure you select a quadrat before proceeding.
- -
- ) : ( - Note: This plot does not accept - subquadrats.
- Please ensure that you use quadrat names when submitting new measurements instead of subquadrat - names
- )} - {session?.user.isAdmin ? ( - - {/* Note: ADMINISTRATOR VIEW - - Please use the toggle to change this - setting if it is incorrect - setUseSubquadrats(event.target.checked)} - color={useSubquadrats ? 'primary' : 'neutral'} - variant={useSubquadrats ? 'solid' : 'outlined'} - endDecorator={useSubquadrats ? 'Use subquadrats' : 'Use quadrats'} - slotProps={{ - endDecorator: { - sx: { - minWidth: 24, - }, - }, - }} - /> - */} - - ) : ( - If this setting is inaccurate, please contact - an administrator. - )} -
-
-
- - - -
-
- { - setIsUploadModalOpen(false); - setRefresh(true); - }} - formType={"measurements"} - /> - - - ); -} \ No newline at end of file + return ; +} diff --git a/frontend/app/(hub)/measurementshub/uploadedfiles/error.tsx b/frontend/app/(hub)/measurementshub/uploadedfiles/error.tsx new file mode 100644 index 00000000..1ed10f37 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/uploadedfiles/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - Uploaded Files Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/measurementshub/uploadedfiles/page.tsx b/frontend/app/(hub)/measurementshub/uploadedfiles/page.tsx index 263b1ec9..ccbda098 100644 --- a/frontend/app/(hub)/measurementshub/uploadedfiles/page.tsx +++ b/frontend/app/(hub)/measurementshub/uploadedfiles/page.tsx @@ -1,15 +1,15 @@ -"use client"; +'use client'; -import {useOrgCensusContext, usePlotContext} from "@/app/contexts/userselectionprovider"; -import ViewUploadedFiles from "@/components/uploadsystemhelpers/viewuploadedfiles"; -import {useState} from "react"; +import { useOrgCensusContext, usePlotContext } from '@/app/contexts/userselectionprovider'; +import ViewUploadedFiles from '@/components/uploadsystemhelpers/viewuploadedfiles'; +import { useState } from 'react'; export default function UploadedFilesPage() { const [refreshFileList, setRefreshFileList] = useState(false); const currentPlot = usePlotContext(); const currentCensus = useOrgCensusContext(); - return ; -} \ No newline at end of file + return ( + + ); +} diff --git a/frontend/app/(hub)/measurementshub/validationhistory/page.tsx b/frontend/app/(hub)/measurementshub/validationhistory/page.tsx deleted file mode 100644 index ab2a9cb4..00000000 --- a/frontend/app/(hub)/measurementshub/validationhistory/page.tsx +++ /dev/null @@ -1,5 +0,0 @@ -"use client"; - -export default function ValidationHistoryPage() { - return <>; -} \ No newline at end of file diff --git a/frontend/app/(hub)/measurementshub/validations/error.tsx b/frontend/app/(hub)/measurementshub/validations/error.tsx new file mode 100644 index 00000000..51e41ab7 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/validations/error.tsx @@ -0,0 +1,104 @@ +'use client'; // Error boundaries must be Client Components + +import { useEffect } from 'react'; +import { Alert, Box, Button, Card, CardContent, Divider, Stack, Typography } from '@mui/joy'; +import CircularProgress from '@mui/joy/CircularProgress'; +import { Warning } from '@mui/icons-material'; +import { useSession } from 'next-auth/react'; +import { useRouter } from 'next/navigation'; + +export default function Error({ error, reset }: { error: Error & { digest?: string }; reset: () => void }) { + const { data: session } = useSession(); + useEffect(() => { + // Log the error to an error reporting service + console.error(error); + }, [error]); + const router = useRouter(); + if (error.message === 'access-denied') { + return ( + + + + + } + sx={{ alignItems: 'flex-start', gap: '1rem' }} + > + + Access Denied + Unfortunately, you do not have access to this webpage. + + Your assigned role is + + {session?.user?.userStatus} + + + Please submit a GitHub issue if this is incorrect and you should have access to this page. + + + + + + + + ); + } + return ( + + + + + } + sx={{ alignItems: 'flex-start', gap: '1rem' }} + > + + Oh no! 
+ Something unexpected seems to have went wrong. + Please provide the following metadata to an administrator so they can diagnose the problem further! + + + + Metadata + + + + + Error Message:{' '} + + {error.message} + + + + + Your assigned role is + + {session?.user?.userStatus} + + + Please submit a GitHub issue if this is incorrect and you should have access to this page. + + + + + + + + ); +} diff --git a/frontend/app/(hub)/measurementshub/validations/page.tsx b/frontend/app/(hub)/measurementshub/validations/page.tsx new file mode 100644 index 00000000..119a76b7 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/validations/page.tsx @@ -0,0 +1,123 @@ +'use client'; + +import { Box, Card, CardContent, Typography } from '@mui/joy'; +import React, { useEffect, useState } from 'react'; +import ValidationCard from '@/components/validationcard'; +import { ValidationProceduresRDS } from '@/config/sqlrdsdefinitions/validations'; +import { useSiteContext } from '@/app/contexts/userselectionprovider'; +import { useSession } from 'next-auth/react'; + +export default function ValidationsPage() { + const [globalValidations, setGlobalValidations] = React.useState([]); + const [loading, setLoading] = useState(true); // Use a loading state instead of refresh + const [schemaDetails, setSchemaDetails] = useState<{ table_name: string; column_name: string }[]>([]); + const { data: session } = useSession(); + + const currentSite = useSiteContext(); + + useEffect(() => { + if (session !== null && !['db admin', 'global'].includes(session.user.userStatus)) { + throw new Error('access-denied'); + } + }, []); + + const handleSaveChanges = async (updatedValidation: ValidationProceduresRDS) => { + try { + // Make the API call to toggle the validation + const response = await fetch(`/api/validations/crud`, { + method: 'PATCH', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(updatedValidation) // Pass the entire updated validation object + }); + if (response.ok) { + // Update the globalValidations state directly + setGlobalValidations(prev => prev.map(val => (val.validationID === updatedValidation.validationID ? updatedValidation : val))); + } else { + console.error('Failed to toggle validation'); + } + } catch (error) { + console.error('Error toggling validation:', error); + } + }; + + const handleDelete = async (validationID?: number) => { + try { + // Make the API call to delete the validation + const response = await fetch(`/api/validations/delete/${validationID}`, { + method: 'DELETE' + }); + if (response.ok) { + // Remove the deleted validation from the globalValidations state + setGlobalValidations(prev => prev.filter(validation => validation.validationID !== validationID)); + } else { + console.error('Failed to delete validation'); + } + } catch (error) { + console.error('Error deleting validation:', error); + } + }; + + useEffect(() => { + async function fetchValidations() { + try { + const response = await fetch('/api/validations/crud', { method: 'GET' }); + const data = await response.json(); + console.log('data: ', data); + setGlobalValidations(data); + } catch (err) { + console.error('Error fetching validations:', err); + } finally { + setLoading(false); // Loading is complete + } + } + + fetchValidations().catch(console.error); // Initial load + }, []); + + // Fetch schema details when component mounts + useEffect(() => { + const fetchSchema = async () => { + try { + const response = await fetch(`/api/structure/${currentSite?.schemaName ?? 
''}`); + const data = await response.json(); + if (data.schema) { + setSchemaDetails(data.schema); + } + } catch (error) { + console.error('Error fetching schema:', error); + } + }; + + if (currentSite?.schemaName) { + fetchSchema().then(r => console.log(r)); + } + }, [currentSite?.schemaName]); + + return ( + + + + + Review Global Validations + + {globalValidations.map(validation => ( + + ))} + + + + + + Review Site-Specific Validations + + + + + ); +} diff --git a/frontend/app/(hub)/measurementshub/viewfulltable/error.tsx b/frontend/app/(hub)/measurementshub/viewfulltable/error.tsx new file mode 100644 index 00000000..568ba7e2 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/viewfulltable/error.tsx @@ -0,0 +1,16 @@ +'use client'; + +import React from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + return ( + + Something went wrong - View Full Table Page + {error.message} + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(hub)/measurementshub/viewfulltable/page.tsx b/frontend/app/(hub)/measurementshub/viewfulltable/page.tsx new file mode 100644 index 00000000..39dfb781 --- /dev/null +++ b/frontend/app/(hub)/measurementshub/viewfulltable/page.tsx @@ -0,0 +1,7 @@ +'use client'; + +import ViewFullTableDataGrid from '@/components/datagrids/applications/viewfulltabledatagrid'; + +export default function ViewFullTablePage() { + return ; +} diff --git a/frontend/app/(login)/login/error.tsx b/frontend/app/(login)/login/error.tsx new file mode 100644 index 00000000..c864b03d --- /dev/null +++ b/frontend/app/(login)/login/error.tsx @@ -0,0 +1,24 @@ +'use client'; + +import React, { useEffect } from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + useEffect(() => { + const timer = setTimeout(() => { + reset(); + }, 5000); + return () => clearTimeout(timer); + }, [reset]); + + return ( + + Something went wrong - Login Page + {error.message} + Retrying in 5 seconds... 
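// Hedged sketch (illustration only, not part of this diff): the validations page earlier in this
// diff gates access by throwing 'access-denied' from an effect so the route's error.tsx can branch
// on the message. A reusable guard could package that check; the hook name useRequireRole and the
// role list passed by callers are assumptions, and the userStatus field relies on the session
// augmentation introduced elsewhere in this PR.
import { useEffect } from 'react';
import { useSession } from 'next-auth/react';

export function useRequireRole(allowedRoles: string[]) {
  const { data: session } = useSession();
  useEffect(() => {
    if (session !== null && !allowedRoles.includes(session.user.userStatus)) {
      throw new Error('access-denied'); // surfaced by the nearest error.tsx boundary
    }
  }, [session, allowedRoles]);
}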
+ + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/(login)/login/page.tsx b/frontend/app/(login)/login/page.tsx index 5cd4f923..4c5acaf2 100644 --- a/frontend/app/(login)/login/page.tsx +++ b/frontend/app/(login)/login/page.tsx @@ -1,34 +1,29 @@ -"use client"; -import React, {useEffect, useState} from "react"; -import {useSession} from "next-auth/react"; -import {animated, useTransition} from "@react-spring/web"; -import styles from "@/styles/styles.module.css"; -import Box from "@mui/joy/Box"; -import UnauthenticatedSidebar from "@/components/unauthenticatedsidebar"; -import {redirect} from "next/navigation"; +'use client'; +import React, { useEffect, useState } from 'react'; +import { useSession } from 'next-auth/react'; +import { animated, useTransition } from '@react-spring/web'; +import styles from '@/styles/styles.module.css'; +import Box from '@mui/joy/Box'; +import UnauthenticatedSidebar from '@/components/unauthenticatedsidebar'; +import { redirect } from 'next/navigation'; -const slides = [ - 'background-1.jpg', - 'background-2.jpg', - 'background-3.jpg', - 'background-4.jpg', -]; +const slides = ['background-1.jpg', 'background-2.jpg', 'background-3.jpg', 'background-4.jpg']; export default function LoginPage() { - const {data: _session, status} = useSession(); + const { data: _session, status } = useSession(); const [index, setIndex] = useState(0); const transitions = useTransition(index, { key: index, - from: {opacity: 0}, - enter: {opacity: 0.5}, - leave: {opacity: 0}, - config: {duration: 5000}, + from: { opacity: 0 }, + enter: { opacity: 0.5 }, + leave: { opacity: 0 }, + config: { duration: 5000 }, onRest: (_a, _b, item) => { if (index === item) { setIndex(state => (state + 1) % slides.length); } }, - exitBeforeEnter: true, + exitBeforeEnter: true }); // feedback received -- endless loop will consume too many resources and needs to be removed. Single loop through all slides should suffice. 
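// Hedged sketch responding to the feedback note above (illustration only, not part of this diff):
// advance through the slides exactly once and stop, instead of cycling forever. Assumes the same
// slides array and setIndex state setter used in this file; the hook name useSingleSlidePass is
// hypothetical.
import { useEffect } from 'react';

export function useSingleSlidePass(slideCount: number, setIndex: (updater: (prev: number) => number) => void, intervalMs = 5000) {
  useEffect(() => {
    const timer = setInterval(() => {
      setIndex(prev => {
        if (prev + 1 >= slideCount) {
          clearInterval(timer); // last slide reached -- stop advancing
          return prev;
        }
        return prev + 1;
      });
    }, intervalMs);
    return () => clearInterval(timer); // also clear if the page unmounts early
  }, [slideCount, setIndex, intervalMs]);
}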
@@ -36,22 +31,23 @@ export default function LoginPage() { setInterval(() => setIndex(state => (state + 1) % slides.length), 5000); }, []); - if (status === "unauthenticated") { + if (status === 'unauthenticated') { return ( - + {transitions((style, i) => ( + data-testid={`${slides[i]}`} + /> ))} - + ); - } else if (status === "authenticated") { + } else if (status === 'authenticated') { redirect('/dashboard'); } -} \ No newline at end of file +} diff --git a/frontend/app/api/auth/[[...nextauth]]/route.ts b/frontend/app/api/auth/[[...nextauth]]/route.ts index 0925f000..64a1f752 100644 --- a/frontend/app/api/auth/[[...nextauth]]/route.ts +++ b/frontend/app/api/auth/[[...nextauth]]/route.ts @@ -1,46 +1,67 @@ -import NextAuth, { AzureADProfile } from "next-auth"; -import AzureADProvider from "next-auth/providers/azure-ad"; -import { getAllowedSchemas, getAllSchemas, verifyEmail } from "@/components/processors/processorhelperfunctions"; -import { SitesRDS } from '@/config/sqlrdsdefinitions/tables/sitesrds'; +import NextAuth, { AzureADProfile } from 'next-auth'; +import AzureADProvider from 'next-auth/providers/azure-ad'; +import { getAllowedSchemas, getAllSchemas } from '@/components/processors/processorhelperfunctions'; +import { UserAuthRoles } from '@/config/macros'; +import { SitesRDS } from '@/config/sqlrdsdefinitions/zones'; +import { getConn, runQuery } from '@/components/processors/processormacros'; const handler = NextAuth({ - secret: process.env.NEXTAUTH_SECRET as string, + secret: process.env.NEXTAUTH_SECRET!, providers: [ AzureADProvider({ clientId: process.env.AZURE_AD_CLIENT_ID!, clientSecret: process.env.AZURE_AD_CLIENT_SECRET!, tenantId: process.env.AZURE_AD_TENANT_ID!, - authorization: { params: { scope: "openid profile email user.Read" } }, - }), + authorization: { params: { scope: 'openid profile email user.Read' } } + }) ], session: { - strategy: "jwt", - maxAge: 24 * 60 * 60, // 24 hours (you can adjust this value as needed) + strategy: 'jwt', + maxAge: 24 * 60 * 60 // 24 hours (you can adjust this value as needed) }, callbacks: { - async signIn({ user, account, profile, email: signInEmail, credentials }) { - console.log('user: ', user); - console.log('account: ', account); - console.log('credentials: ', credentials); - console.log('profile: ', profile); + async signIn({ user, profile, email: signInEmail }) { + console.log('callback -- signin'); const azureProfile = profile as AzureADProfile; const userEmail = user.email || signInEmail || azureProfile.preferred_username; + console.log('user email: ', userEmail); if (typeof userEmail !== 'string') { console.error('User email is not a string:', userEmail); return false; // Email is not a valid string, abort sign-in } if (userEmail) { - const { emailVerified, isAdmin } = await verifyEmail(userEmail); - if (!emailVerified) { - throw new Error("User email not found."); + console.log('getting connection'); + let conn, emailVerified, userStatus; + try { + conn = await getConn(); + console.log('obtained'); + const query = `SELECT UserStatus FROM catalog.users WHERE Email = '${userEmail}' LIMIT 1`; + const results = await runQuery(conn, query); + console.log('results: ', results); + + // emailVerified is true if there is at least one result + emailVerified = results.length > 0; + console.log('emailVerified: ', emailVerified); + if (!emailVerified) { + console.error('User email not found.'); + return false; + } + userStatus = results[0].UserStatus; + console.log('userStatus: ', userStatus); + } catch (e: any) { + console.error('Error 
fetching user status:', e); + throw new Error('Failed to fetch user status.'); + } finally { + if (conn) conn.release(); } - user.isAdmin = isAdmin; // Add isAdmin property to the user object + user.userStatus = userStatus as UserAuthRoles; user.email = userEmail; // console.log('getting all sites: '); const allSites = await getAllSchemas(); const allowedSites = await getAllowedSchemas(userEmail); if (!allowedSites || !allSites) { - throw new Error("User does not have any allowed sites."); + console.error('User does not have any allowed sites.'); + return false; } user.sites = allowedSites; @@ -51,20 +72,20 @@ const handler = NextAuth({ }, async jwt({ token, user }) { - if (user?.isAdmin !== undefined) token.isAdmin = user.isAdmin; // Persist admin status in the JWT token - if (user?.sites !== undefined) token.sites = user.sites; // persist allowed sites in JWT token - if (user?.allsites !== undefined) token.allsites = user.allsites; - // console.log('jwt admin state: ', token.isAdmin); - // console.log('jwt sites: ', token.sites); - // console.log('jwt all sites: ', token.allsites); + // If this is the first time the JWT is issued, persist custom properties + if (user) { + token.userStatus = user.userStatus; + token.sites = user.sites; + token.allsites = user.allsites; + } return token; }, async session({ session, token }) { - if (typeof token.isAdmin === 'boolean') { - session.user.isAdmin = token.isAdmin; + if (typeof token.userStatus === 'string') { + session.user.userStatus = token.userStatus as UserAuthRoles; } else { - session.user.isAdmin = false; + session.user.userStatus = 'field crew' as UserAuthRoles; // default no admin permissions } if (token && token.allsites && Array.isArray(token.allsites)) { session.user.allsites = token.allsites as SitesRDS[]; @@ -72,15 +93,12 @@ const handler = NextAuth({ if (token && token.sites && Array.isArray(token.sites)) { session.user.sites = token.sites as SitesRDS[]; } - // console.log('session admin state: ', session.user.isAdmin); - // console.log('session sites: ', session.user.sites); - // console.log('session all sites: ', session.user.allsites); return session; - }, + } }, pages: { - error: '/loginfailed', + error: '/loginfailed' } }); -export { handler as GET, handler as POST }; \ No newline at end of file +export { handler as GET, handler as POST }; diff --git a/frontend/app/api/changelog/overview/[changelogType]/[[...options]]/route.ts b/frontend/app/api/changelog/overview/[changelogType]/[[...options]]/route.ts new file mode 100644 index 00000000..12b814eb --- /dev/null +++ b/frontend/app/api/changelog/overview/[changelogType]/[[...options]]/route.ts @@ -0,0 +1,46 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { PoolConnection } from 'mysql2/promise'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import MapperFactory from '@/config/datamapper'; + +export async function GET(request: NextRequest, { params }: { params: { changelogType: string; options?: string[] } }) { + const schema = request.nextUrl.searchParams.get('schema'); + if (!schema) throw new Error('schema not found'); + if (!params.changelogType) throw new Error('changelogType not provided'); + if (!params.options) throw new Error('options not provided'); + if (params.options.length !== 2) throw new Error('Missing plot id or census id parameters'); + const [plotIDParam, pcnParam] = params.options; + const plotID = parseInt(plotIDParam); + const pcn = parseInt(pcnParam); + 
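// Hedged usage sketch (illustration only, not part of this diff): how a client might call the
// changelog overview route defined here -- changelogType in the path, then plot ID and plot census
// number as the two optional segments, with the schema as a query parameter. The helper name
// fetchRecentChanges and the values a caller supplies are assumptions.
async function fetchRecentChanges(schema: string, changelogType: 'unifiedchangelog' | 'validationchangelog', plotID: number, plotCensusNumber: number) {
  const res = await fetch(`/api/changelog/overview/${changelogType}/${plotID}/${plotCensusNumber}?schema=${schema}`, { method: 'GET' });
  if (!res.ok) throw new Error(`changelog fetch failed: ${res.status}`);
  // the handler responds with an empty body when no rows match, so guard before parsing
  const text = await res.text();
  return text ? JSON.parse(text) : null;
}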
let conn: PoolConnection | null = null; + try { + conn = await getConn(); + let query = ``; + switch (params.changelogType) { + case 'unifiedchangelog': + query = ` + SELECT * FROM ${schema}.unifiedchangelog + WHERE + (PlotID = ? OR PlotID IS NULL) AND + (CensusID IN (SELECT CensusID FROM ${schema}.census WHERE PlotID = ? AND PlotCensusNumber = ?) OR CensusID IS NULL) + ORDER BY ChangeTimestamp DESC + LIMIT 5;`; + break; + case 'validationchangelog': + query = `SELECT * + FROM ${schema}.${params.changelogType} + ORDER BY RunDateTime DESC LIMIT 5;`; + break; + } + + const results = await runQuery(conn, query, [plotID, plotID, pcn]); + return new NextResponse(results.length > 0 ? JSON.stringify(MapperFactory.getMapper(params.changelogType).mapData(results)) : null, { + status: HTTPResponses.OK + }); + } catch (e: any) { + throw new Error('SQL query failed: ' + e.message); + } finally { + if (conn) conn.release(); + } +} diff --git a/frontend/app/api/cmprevalidation/[dataType]/[[...slugs]]/route.ts b/frontend/app/api/cmprevalidation/[dataType]/[[...slugs]]/route.ts index 0d324a93..0685afff 100644 --- a/frontend/app/api/cmprevalidation/[dataType]/[[...slugs]]/route.ts +++ b/frontend/app/api/cmprevalidation/[dataType]/[[...slugs]]/route.ts @@ -1,14 +1,24 @@ -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PoolConnection} from "mysql2/promise"; -import {NextRequest, NextResponse} from "next/server"; -import {HTTPResponses} from "@/config/macros"; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; +import { HTTPResponses } from '@/config/macros'; // datatype: table name // expecting 1) schema 2) plotID 3) plotCensusNumber -export async function GET(_request: NextRequest, {params}: { params: { dataType: string, slugs?: string[] } }) { - if (!params.slugs || !params.dataType) throw new Error("missing slugs"); +export async function GET(_request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { + if (!params.slugs || !params.dataType) throw new Error('missing slugs'); const [schema, plotID, plotCensusNumber] = params.slugs; - if ((!schema || schema === 'undefined') || (!plotID || plotID === 'undefined') || (!plotCensusNumber || plotCensusNumber === 'undefined') || (params.slugs.length > 3 || params.slugs.length < 3)) throw new Error("incorrect slugs provided"); + if ( + !schema || + schema === 'undefined' || + !plotID || + plotID === 'undefined' || + !plotCensusNumber || + plotCensusNumber === 'undefined' || + params.slugs.length > 3 || + params.slugs.length < 3 + ) + throw new Error('incorrect slugs provided'); let connection: PoolConnection | null = null; try { @@ -16,55 +26,82 @@ export async function GET(_request: NextRequest, {params}: { params: { dataType: switch (params.dataType) { case 'attributes': - case 'personnel': case 'species': const baseQuery = `SELECT 1 FROM ${schema}.${params.dataType} LIMIT 1`; // Check if the table has any row const baseResults = await runQuery(connection, baseQuery); if (connection) connection.release(); - if (baseResults.length === 0) return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + if (baseResults.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); + break; + case 'personnel': + const pQuery = `SELECT 1 FROM ${schema}.${params.dataType} WHERE CensusID IN (SELECT 
CensusID from ${schema}.census WHERE PlotID = ${plotID} AND PlotCensusNumber = ${plotCensusNumber})`; // Check if the table has any row + const pResults = await runQuery(connection, pQuery); + if (connection) connection.release(); + if (pResults.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); break; case 'quadrats': - const query = `SELECT 1 FROM ${schema}.${params.dataType} WHERE PlotID = ${plotID} AND CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotCensusNumber = ${plotCensusNumber})`; // Check if the table has any row + const query = `SELECT 1 FROM ${schema}.${params.dataType} WHERE PlotID = ${plotID} AND CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotID = ${plotID} AND PlotCensusNumber = ${plotCensusNumber})`; // Check if the table has any row const results = await runQuery(connection, query); if (connection) connection.release(); - if (results.length === 0) return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + if (results.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); break; case 'subquadrats': const subquadratsQuery = `SELECT 1 FROM ${schema}.${params.dataType} s JOIN ${schema}.quadrats q ON s.QuadratID = q.QuadratID WHERE q.PlotID = ${plotID} - AND q.CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotCensusNumber = ${plotCensusNumber}) LIMIT 1`; + AND q.CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotID = ${plotID} AND PlotCensusNumber = ${plotCensusNumber}) LIMIT 1`; const subquadratsResults = await runQuery(connection, subquadratsQuery); if (connection) connection.release(); - if (subquadratsResults.length === 0) return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + if (subquadratsResults.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); break; case 'quadratpersonnel': // Validation for quadrats table const quadratsQuery = `SELECT 1 FROM ${schema}.quadrats WHERE PlotID = ${plotID} - AND CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotCensusNumber = ${plotCensusNumber}) LIMIT 1`; + AND CensusID IN (SELECT CensusID from ${schema}.census WHERE PlotID = ${plotID} AND PlotCensusNumber = ${plotCensusNumber}) LIMIT 1`; const quadratsResults = await runQuery(connection, quadratsQuery); if (connection) connection.release(); - if (quadratsResults.length === 0) return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + if (quadratsResults.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); // Validation for personnel table const personnelQuery = `SELECT 1 FROM ${schema}.personnel LIMIT 1`; const personnelResults = await runQuery(connection, personnelQuery); if (connection) connection.release(); - if (personnelResults.length === 0) return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + if (personnelResults.length === 0) + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); break; default: - return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); } // If all conditions are satisfied connection.release(); - return new NextResponse(null, {status: 200}); + return new NextResponse(null, { 
status: HTTPResponses.OK }); } catch (e: any) { console.error(e); - return new NextResponse(null, {status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE}); + return new NextResponse(null, { + status: HTTPResponses.PRECONDITION_VALIDATION_FAILURE + }); } finally { if (connection) connection.release(); } diff --git a/frontend/app/api/details/cmid/route.ts b/frontend/app/api/details/cmid/route.ts index 6535569b..c9f169f2 100644 --- a/frontend/app/api/details/cmid/route.ts +++ b/frontend/app/api/details/cmid/route.ts @@ -1,6 +1,7 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PoolConnection} from "mysql2/promise"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { PoolConnection } from 'mysql2/promise'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest) { const cmID = parseInt(request.nextUrl.searchParams.get('cmid')!); @@ -10,32 +11,30 @@ export async function GET(request: NextRequest) { try { conn = await getConn(); const query = ` - SELECT - cm.CoreMeasurementID, - p.PlotName, - q.QuadratName, - c.PlotCensusNumber, - c.StartDate, - c.EndDate, - per.FirstName, - per.LastName, - s.SpeciesName - FROM - ${schema}.coremeasurements cm - INNER JOIN - ${schema}.plots p ON cm.PlotID = p.PlotID - INNER JOIN - ${schema}.quadrats q ON cm.QuadratID = q.QuadratID - INNER JOIN - ${schema}.census c ON cm.CensusID = c.CensusID - INNER JOIN - ${schema}.personnel per ON cm.PersonnelID = per.PersonnelID - INNER JOIN - ${schema}.trees t ON cm.TreeID = t.TreeID - INNER JOIN - ${schema}.species s ON t.SpeciesID = s.SpeciesID - WHERE - cm.CoreMeasurementID = ?;`; + SELECT + cm.CoreMeasurementID, + p.PlotName, + q.QuadratName, + c.PlotCensusNumber, + c.StartDate, + c.EndDate, + s.SpeciesName + FROM + ${schema}.coremeasurements cm + INNER JOIN + ${schema}.stems st ON cm.StemID = st.StemID + INNER JOIN + ${schema}.trees t ON st.TreeID = t.TreeID + INNER JOIN + ${schema}.species s ON t.SpeciesID = s.SpeciesID + INNER JOIN + ${schema}.quadrats q ON st.QuadratID = q.QuadratID + INNER JOIN + ${schema}.plots p ON q.PlotID = p.PlotID + INNER JOIN + ${schema}.census c ON cm.CensusID = c.CensusID + WHERE + cm.CoreMeasurementID = ?;`; const results = await runQuery(conn, query, [cmID]); return new NextResponse( JSON.stringify( @@ -44,15 +43,14 @@ export async function GET(request: NextRequest) { plotName: row.PlotName, quadratName: row.QuadratName, plotCensusNumber: row.PlotCensusNumber, - censusStart: row.StartDate, - censusEnd: row.EndDate, - personnelName: row.FirstName + ' ' + row.LastName, speciesName: row.SpeciesName })) - ), {status: 200}); + ), + { status: HTTPResponses.OK } + ); } catch (error: any) { throw new Error('SQL query failed: ' + error.message); } finally { if (conn) conn.release(); } -} \ No newline at end of file +} diff --git a/frontend/app/api/fetchall/[[...slugs]]/route.ts b/frontend/app/api/fetchall/[[...slugs]]/route.ts index f87640e1..9cd93c6f 100644 --- a/frontend/app/api/fetchall/[[...slugs]]/route.ts +++ b/frontend/app/api/fetchall/[[...slugs]]/route.ts @@ -1,72 +1,67 @@ -import {getConn, runQuery} from "@/components/processors/processormacros"; -import MapperFactory, {IDataMapper} from "@/config/datamapper"; -import {HTTPResponses} from "@/config/macros"; -import {PoolConnection} from "mysql2/promise"; -import {NextRequest, NextResponse} from "next/server"; 
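// Hedged usage sketch (illustration only, not part of this diff): probing the cmprevalidation route
// earlier in this diff. It answers HTTPResponses.OK when the named table has qualifying rows for the
// given plot and plot census number, and a precondition-failure status otherwise, so a boolean check
// is enough on the client. The helper name tableHasPrerequisiteData is hypothetical.
async function tableHasPrerequisiteData(dataType: string, schema: string, plotID: number, plotCensusNumber: number): Promise<boolean> {
  const res = await fetch(`/api/cmprevalidation/${dataType}/${schema}/${plotID}/${plotCensusNumber}`, { method: 'GET' });
  return res.ok; // any 2xx status means the precondition is satisfied
}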
+import { PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import MapperFactory from '@/config/datamapper'; +import { HTTPResponses } from '@/config/macros'; const buildQuery = (schema: string, fetchType: string, plotID?: string, plotCensusNumber?: string, quadratID?: string): string => { if (fetchType === 'plots') { return ` - SELECT - p.*, - COUNT(q.QuadratID) AS NumQuadrats - FROM - ${schema}.plots p - LEFT JOIN - ${schema}.quadrats q ON p.PlotID = q.PlotID - GROUP BY - p.PlotID - ${plotID && plotID !== 'undefined' && !isNaN(parseInt(plotID)) ? `HAVING p.PlotID = ${plotID}` : ''}`; - } - - let query = `SELECT * FROM ${schema}.${fetchType}`; - const conditions = []; + SELECT p.*, + COUNT(q.QuadratID) AS NumQuadrats + FROM ${schema}.plots p + LEFT JOIN + ${schema}.quadrats q ON p.PlotID = q.PlotID + GROUP BY p.PlotID + ${plotID && plotID !== 'undefined' && !isNaN(parseInt(plotID)) ? `HAVING p.PlotID = ${plotID}` : ''}`; + } else if (fetchType === 'roles') { + return `SELECT * + FROM ${schema}.${fetchType}`; + } else { + let query = `SELECT * + FROM ${schema}.${fetchType}`; + const conditions = []; - if (plotID && plotID !== 'undefined' && !isNaN(parseInt(plotID))) conditions.push(`PlotID = ${plotID}`); - if (plotCensusNumber && plotCensusNumber !== 'undefined' && !isNaN(parseInt(plotCensusNumber))) { - conditions.push(`CensusID IN ( - SELECT c.CensusID - FROM ${schema}.census c - WHERE c.PlotID = PlotID - AND c.PlotCensusNumber = ${plotCensusNumber})`); - } - if (quadratID && quadratID !== 'undefined' && !isNaN(parseInt(quadratID))) conditions.push(`QuadratID = ${quadratID}`); + if (plotID && plotID !== 'undefined' && !isNaN(parseInt(plotID)) && fetchType !== 'personnel') { + conditions.push(`PlotID = ${plotID}`); + } + if (plotCensusNumber && plotCensusNumber !== 'undefined' && !isNaN(parseInt(plotCensusNumber))) { + conditions.push(`CensusID IN (SELECT c.CensusID FROM ${schema}.census c WHERE c.PlotID = ${plotID} AND c.PlotCensusNumber = ${plotCensusNumber})`); + } + if (quadratID && quadratID !== 'undefined' && !isNaN(parseInt(quadratID))) { + conditions.push(`QuadratID = ${quadratID}`); + } - if (conditions.length > 0) { - query += ' WHERE ' + conditions.join(' AND '); + if (conditions.length > 0) { + query += ' WHERE ' + conditions.join(' AND '); + } + return query; } - return query; }; - -export async function GET(request: NextRequest, {params}: { params: { slugs?: string[] } }) { +// ordering: PCQ +export async function GET(request: NextRequest, { params }: { params: { slugs?: string[] } }) { const schema = request.nextUrl.searchParams.get('schema'); if (!schema || schema === 'undefined') { - throw new Error("Schema selection was not provided to API endpoint"); + throw new Error('Schema selection was not provided to API endpoint'); } - const [fetchType, plotID, censusID, quadratID] = params.slugs ?? []; - if (!fetchType) { - throw new Error("fetchType was not correctly provided"); + const [dataType, plotID, plotCensusNumber, quadratID] = params.slugs ?? 
[]; + if (!dataType) { + throw new Error('fetchType was not correctly provided'); } - console.log('fetchall --> slugs provided: fetchType: ', fetchType, 'plotID: ', plotID, 'censusID: ', censusID, 'quadratID: ', quadratID); - const query = buildQuery(schema, fetchType, plotID, censusID, quadratID); + console.log('fetchall --> slugs provided: fetchType: ', dataType, 'plotID: ', plotID, 'plotcensusnumber: ', plotCensusNumber, 'quadratID: ', quadratID); + const query = buildQuery(schema, dataType, plotID, plotCensusNumber, quadratID); let conn: PoolConnection | null = null; try { conn = await getConn(); const results = await runQuery(conn, query); - if (!results) { - return new NextResponse(null, {status: 500}); - } - - const mapper: IDataMapper = MapperFactory.getMapper(fetchType); - const rows = mapper.mapData(results); - return new NextResponse(JSON.stringify(rows), {status: HTTPResponses.OK}); + return new NextResponse(JSON.stringify(MapperFactory.getMapper(dataType).mapData(results)), { status: HTTPResponses.OK }); } catch (error) { console.error('Error:', error); - throw new Error("Call failed"); + throw new Error('Call failed'); } finally { if (conn) conn.release(); } diff --git a/frontend/app/api/filehandlers/deletefile/route.ts b/frontend/app/api/filehandlers/deletefile/route.ts index 6e04f8a4..5f6d80d3 100644 --- a/frontend/app/api/filehandlers/deletefile/route.ts +++ b/frontend/app/api/filehandlers/deletefile/route.ts @@ -1,23 +1,31 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getContainerClient} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getContainerClient } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function DELETE(request: NextRequest) { const containerName = request.nextUrl.searchParams.get('container'); const filename = request.nextUrl.searchParams.get('filename'); if (!containerName || !filename) { - return new NextResponse('Container name and filename are required', {status: 400}); + return new NextResponse('Container name and filename are required', { + status: 400 + }); } try { const containerClient = await getContainerClient(containerName.toLowerCase()); // Adjust as needed - if (!containerClient) return new NextResponse('Container name and filename are required', {status: 400}); + if (!containerClient) + return new NextResponse('Container name and filename are required', { + status: 400 + }); const blobClient = containerClient.getBlobClient(filename); await blobClient.delete(); - return new NextResponse('File deleted successfully', {status: 200}); + return new NextResponse('File deleted successfully', { + status: HTTPResponses.OK + }); } catch (error) { console.error('Delete file error:', error); - return new NextResponse((error as Error).message, {status: 500}); + return new NextResponse((error as Error).message, { status: 500 }); } -} \ No newline at end of file +} diff --git a/frontend/app/api/filehandlers/downloadallfiles/route.ts b/frontend/app/api/filehandlers/downloadallfiles/route.ts index fe5fb7c5..16dbbf55 100644 --- a/frontend/app/api/filehandlers/downloadallfiles/route.ts +++ b/frontend/app/api/filehandlers/downloadallfiles/route.ts @@ -1,50 +1,51 @@ // DOWNLOAD ALL FILES ROUTE HANDLER -import {NextRequest, NextResponse} from "next/server"; -import {getContainerClient} from "@/config/macros/azurestorage"; - +import { NextRequest, NextResponse } from 'next/server'; +import { getContainerClient } from 
'@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest) { const plot = request.nextUrl.searchParams.get('plot'); const census = request.nextUrl.searchParams.get('census'); if (!plot || !census) { - return new NextResponse('Both plot and census parameters are required', {status: 400}); + return new NextResponse('Both plot and census parameters are required', { + status: 400 + }); } const blobData: any = []; const containerClient = await getContainerClient(`${plot}-${census}`); if (!containerClient) { - return NextResponse.json({statusText: "Container client creation error"}, {status: 400}); + return NextResponse.json({ statusText: 'Container client creation error' }, { status: 400 }); } else { console.log(`container client created`); } const listOptions = { includeMetadata: true, - includeVersions: false, + includeVersions: false }; let i = 0; try { for await (const blob of containerClient.listBlobsFlat(listOptions)) { if (!blob) console.error('blob is undefined'); // blobData.push({ key: i.toString(), filename: blob.name, metadata: blob.metadata! }); - blobData.push( - { - key: ++i, - name: blob.name, - user: blob.metadata?.user, - formType: blob.metadata?.FormType, - fileErrors: blob.metadata?.FileErrorState ? JSON.parse(blob.metadata?.FileErrorState) : '', - date: blob.properties.lastModified - }); + blobData.push({ + key: ++i, + name: blob.name, + user: blob.metadata?.user, + formType: blob.metadata?.FormType, + fileErrors: blob.metadata?.FileErrorState ? JSON.parse(blob.metadata?.FileErrorState) : '', + date: blob.properties.lastModified + }); } return new NextResponse( JSON.stringify({ - responseMessage: "List of files", - blobData: blobData, + responseMessage: 'List of files', + blobData: blobData }), - {status: 200} + { status: HTTPResponses.OK } ); } catch (error: any) { console.error('error in blob listing: ', error); - return NextResponse.json({message: error.message}, {status: 400}); + return NextResponse.json({ message: error.message }, { status: 400 }); } } diff --git a/frontend/app/api/filehandlers/downloadfile/route.ts b/frontend/app/api/filehandlers/downloadfile/route.ts index c14b8fc5..6f8837b6 100644 --- a/frontend/app/api/filehandlers/downloadfile/route.ts +++ b/frontend/app/api/filehandlers/downloadfile/route.ts @@ -1,11 +1,7 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getContainerClient} from "@/config/macros/azurestorage"; -import { - BlobSASPermissions, - BlobServiceClient, - generateBlobSASQueryParameters, - StorageSharedKeyCredential -} from "@azure/storage-blob"; +import { NextRequest, NextResponse } from 'next/server'; +import { getContainerClient } from '@/config/macros/azurestorage'; +import { BlobSASPermissions, BlobServiceClient, generateBlobSASQueryParameters, StorageSharedKeyCredential } from '@azure/storage-blob'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest) { const containerName = request.nextUrl.searchParams.get('container'); @@ -13,13 +9,15 @@ export async function GET(request: NextRequest) { const storageAccountConnectionString = process.env.AZURE_STORAGE_CONNECTION_STRING; if (!containerName || !filename || !storageAccountConnectionString) { - return new NextResponse('Container name, filename, and storage connection string are required', {status: 400}); + return new NextResponse('Container name, filename, and storage connection string are required', { status: 400 }); } try { const containerClient = 
await getContainerClient(containerName.toLowerCase()); if (!containerClient) { - return new NextResponse('Failed to get container client', {status: 400}); + return new NextResponse('Failed to get container client', { + status: 400 + }); } const blobServiceClient = BlobServiceClient.fromConnectionString(storageAccountConnectionString); @@ -31,7 +29,7 @@ export async function GET(request: NextRequest) { blobName: filename, startsOn: new Date(), expiresOn: new Date(new Date().valueOf() + 3600 * 1000), // 1 hour expiration - permissions: BlobSASPermissions.parse("r") // read-only permission + permissions: BlobSASPermissions.parse('r') // read-only permission }; let sasToken = ''; if (blobServiceClient.credential instanceof StorageSharedKeyCredential) { @@ -39,14 +37,14 @@ export async function GET(request: NextRequest) { } const url = `${blobClient.url}?${sasToken}`; - return new NextResponse(JSON.stringify({url}), { - status: 200, + return new NextResponse(JSON.stringify({ url }), { + status: HTTPResponses.OK, headers: { 'Content-Type': 'application/json' } }); } catch (error) { console.error('Download file error:', error); - return new NextResponse((error as Error).message, {status: 500}); + return new NextResponse((error as Error).message, { status: 500 }); } } diff --git a/frontend/app/api/filehandlers/storageload/route.ts b/frontend/app/api/filehandlers/storageload/route.ts index 36bcaa1a..2b630d0c 100644 --- a/frontend/app/api/filehandlers/storageload/route.ts +++ b/frontend/app/api/filehandlers/storageload/route.ts @@ -1,6 +1,6 @@ -import {NextRequest, NextResponse} from 'next/server'; -import {HTTPResponses} from '@/config/macros'; -import {getContainerClient, uploadValidFileAsBuffer} from '@/config/macros/azurestorage'; +import { NextRequest, NextResponse } from 'next/server'; +import { HTTPResponses } from '@/config/macros'; +import { getContainerClient, uploadValidFileAsBuffer } from '@/config/macros/azurestorage'; export async function POST(request: NextRequest) { let formData: FormData; @@ -8,48 +8,54 @@ export async function POST(request: NextRequest) { formData = await request.formData(); if (formData === null || formData === undefined || formData.entries().next().done) throw new Error(); } catch (error) { - return new NextResponse('File is required', {status: 400}); + return new NextResponse('File is required', { status: 400 }); } - console.log("formData: ", formData); - const fileName = request.nextUrl.searchParams.get('fileName')?.trim() ; - const plot = request.nextUrl.searchParams.get("plot")?.trim(); - const census = request.nextUrl.searchParams.get("census")?.trim(); - const user = request.nextUrl.searchParams.get("user"); + const fileName = request.nextUrl.searchParams.get('fileName')?.trim(); + const plot = request.nextUrl.searchParams.get('plot')?.trim(); + const census = request.nextUrl.searchParams.get('census')?.trim(); + const user = request.nextUrl.searchParams.get('user'); const formType = request.nextUrl.searchParams.get('formType'); const file = formData.get(fileName ?? 'file') as File | null; const fileRowErrors = formData.get('fileRowErrors') ? 
JSON.parse(formData.get('fileRowErrors')) : []; - if ((file === null || file === undefined) || - (fileName === undefined || fileName === null) || - (plot === undefined || plot === null) || - (census === undefined || census === null) || - (user === undefined || user === null) || - (formType === undefined || formType === null)) { - - return new NextResponse('Missing required parameters', {status: 400}); + if ( + file === null || + file === undefined || + fileName === undefined || + fileName === null || + plot === undefined || + plot === null || + census === undefined || + census === null || + user === undefined || + user === null || + formType === undefined || + formType === null + ) { + return new NextResponse('Missing required parameters', { status: 400 }); } let containerClient; try { containerClient = await getContainerClient(`${plot.toLowerCase()}-${census.toLowerCase()}`); } catch (error: any) { - console.error("Error getting container client:", error.message); + console.error('Error getting container client:', error.message); return new NextResponse( JSON.stringify({ - responseMessage: "Error getting container client.", - error: error.message, + responseMessage: 'Error getting container client.', + error: error.message }), - {status: HTTPResponses.INTERNAL_SERVER_ERROR} + { status: HTTPResponses.INTERNAL_SERVER_ERROR } ); } if (!containerClient) { - console.error("Container client is undefined."); + console.error('Container client is undefined.'); return new NextResponse( JSON.stringify({ - responseMessage: "Container client is undefined", + responseMessage: 'Container client is undefined' }), - {status: HTTPResponses.INTERNAL_SERVER_ERROR} + { status: HTTPResponses.INTERNAL_SERVER_ERROR } ); } @@ -57,18 +63,18 @@ export async function POST(request: NextRequest) { const uploadResponse = await uploadValidFileAsBuffer(containerClient, file, user, formType, fileRowErrors); console.log(`upload complete: ${uploadResponse?.requestId}`); if (uploadResponse && (uploadResponse._response.status < 200 || uploadResponse._response.status >= 300)) { - throw new Error("Failure: Response status not between 200 & 299"); + throw new Error('Failure: Response status not between 200 & 299'); } } catch (error: any) { - console.error("File processing error:", error); + console.error('File processing error:', error); return new NextResponse( JSON.stringify({ - responseMessage: "File Processing error", - error: error.message ? error.message : 'Unknown error', + responseMessage: 'File Processing error', + error: error.message ? 
error.message : 'Unknown error' }), - {status: HTTPResponses.INTERNAL_SERVER_ERROR} + { status: HTTPResponses.INTERNAL_SERVER_ERROR } ); } - return new NextResponse(JSON.stringify({message: "Insert to Azure Storage successful"}), {status: 200}); + return new NextResponse(JSON.stringify({ message: 'Insert to Azure Storage successful' }), { status: HTTPResponses.OK }); } diff --git a/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts b/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts index 41e3fe7e..6c9c03f3 100644 --- a/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts +++ b/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts @@ -1,35 +1,38 @@ -import {getConn, runQuery} from "@/components/processors/processormacros"; -import MapperFactory from "@/config/datamapper"; -import {handleError} from "@/utils/errorhandler"; -import {PoolConnection, format} from "mysql2/promise"; -import {NextRequest, NextResponse} from "next/server"; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import MapperFactory from '@/config/datamapper'; +import { handleError } from '@/utils/errorhandler'; +import { format, PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; import { - generateInsertOperations, - generateUpdateOperations, - StemDimensionsViewQueryConfig, AllTaxonomiesViewQueryConfig, - StemTaxonomiesViewQueryConfig, + handleDeleteForSlices, + handleUpsertForSlices, + StemTaxonomiesViewQueryConfig } from '@/components/processors/processorhelperfunctions'; -import { HTTPResponses } from "@/config/macros"; - -// slugs SHOULD CONTAIN AT MINIMUM: schema, page, pageSize, plotID, plotCensusNumber, (optional) quadratID -export async function GET(request: NextRequest, {params}: { - params: { dataType: string, slugs?: string[] } -}): Promise> { - if (!params.slugs || params.slugs.length < 5) throw new Error("slugs not received."); - const [schema, pageParam, pageSizeParam, plotIDParam, plotCensusNumberParam, quadratIDParam] = params.slugs; - if ((!schema || schema === 'undefined') || (!pageParam || pageParam === 'undefined') || (!pageSizeParam || pageSizeParam === 'undefined')) throw new Error("core slugs schema/page/pageSize not correctly received"); +import { HTTPResponses } from '@/config/macros'; // slugs SHOULD CONTAIN AT MINIMUM: schema, page, pageSize, plotID, plotCensusNumber, (optional) quadratID, (optional) speciesID + +// slugs SHOULD CONTAIN AT MINIMUM: schema, page, pageSize, plotID, plotCensusNumber, (optional) quadratID, (optional) speciesID +export async function GET( + request: NextRequest, + { + params + }: { + params: { dataType: string; slugs?: string[] }; + } +): Promise> { + if (!params.slugs || params.slugs.length < 5) throw new Error('slugs not received.'); + const [schema, pageParam, pageSizeParam, plotIDParam, plotCensusNumberParam, quadratIDParam, speciesIDParam] = params.slugs; + if (!schema || schema === 'undefined' || !pageParam || pageParam === 'undefined' || !pageSizeParam || pageSizeParam === 'undefined') + throw new Error('core slugs schema/page/pageSize not correctly received'); const page = parseInt(pageParam); const pageSize = parseInt(pageSizeParam); - - if ((!plotIDParam || plotIDParam === '0') || (!plotCensusNumberParam || plotCensusNumberParam === '0')) throw new Error("Core plot/census information not received"); - const plotID = parseInt(plotIDParam); - const plotCensusNumber = parseInt(plotCensusNumberParam); + const plotID = plotIDParam ? 
parseInt(plotIDParam) : undefined; + const plotCensusNumber = plotCensusNumberParam ? parseInt(plotCensusNumberParam) : undefined; const quadratID = quadratIDParam ? parseInt(quadratIDParam) : undefined; + const speciesID = speciesIDParam ? parseInt(speciesIDParam) : undefined; let conn: PoolConnection | null = null; let updatedMeasurementsExist = false; let censusIDs; - let mostRecentCensusID: any; let pastCensusIDs: string | any[]; try { @@ -38,101 +41,131 @@ export async function GET(request: NextRequest, {params}: { const queryParams: any[] = []; switch (params.dataType) { + case 'validationprocedures': + paginatedQuery = ` + SELECT SQL_CALC_FOUND_ROWS * + FROM catalog.${params.dataType} LIMIT ?, ?;`; // validation procedures is special + queryParams.push(page * pageSize, pageSize); + break; + case 'specieslimits': + paginatedQuery = `SELECT SQL_CALC_FOUND_ROWS * FROM ${schema}.${params.dataType} pdt WHERE pdt.SpeciesID = ? LIMIT ?, ?`; + queryParams.push(speciesID, page * pageSize, pageSize); + break; case 'attributes': case 'species': - case 'personnel': case 'stems': case 'alltaxonomiesview': case 'stemtaxonomiesview': case 'quadratpersonnel': - paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS * - FROM ${schema}.${params.dataType} - LIMIT ?, ?`; + case 'sitespecificvalidations': + case 'roles': + paginatedQuery = `SELECT SQL_CALC_FOUND_ROWS * FROM ${schema}.${params.dataType} LIMIT ?, ?`; queryParams.push(page * pageSize, pageSize); break; + case 'personnel': + paginatedQuery = ` + SELECT SQL_CALC_FOUND_ROWS q.* + FROM ${schema}.${params.dataType} q + JOIN ${schema}.census c ON q.CensusID = c.CensusID + WHERE c.PlotID = ? + AND c.PlotCensusNumber = ? LIMIT ?, ?;`; + queryParams.push(plotID, plotCensusNumber, page * pageSize, pageSize); + break; case 'quadrats': paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS q.* - FROM ${schema}.quadrats q - WHERE q.PlotID = ? - AND q.CensusID IN ( - SELECT c.CensusID - FROM ${schema}.census c - WHERE c.PlotID = q.PlotID - AND c.PlotCensusNumber = ? - ) - GROUP BY q.QuadratID - LIMIT ?, ?`; + SELECT SQL_CALC_FOUND_ROWS q.* + FROM ${schema}.${params.dataType} q + JOIN ${schema}.census c ON q.PlotID = c.PlotID AND q.CensusID = c.CensusID + WHERE q.PlotID = ? + AND c.PlotID = ? + AND c.PlotCensusNumber = ? LIMIT ?, ?;`; + queryParams.push(plotID, plotID, plotCensusNumber, page * pageSize, pageSize); + break; + case 'personnelrole': + paginatedQuery = ` + SELECT SQL_CALC_FOUND_ROWS + p.PersonnelID, + p.CensusID, + p.FirstName, + p.LastName, + r.RoleName, + r.RoleDescription + FROM + ${schema}.personnel p + LEFT JOIN + ${schema}.roles r ON p.RoleID = r.RoleID + JOIN + ${schema}.census c ON p.CensusID = c.CensusID + WHERE c.PlotID = ? AND c.PlotCensusNumber = ? LIMIT ?, ?;`; queryParams.push(plotID, plotCensusNumber, page * pageSize, pageSize); break; + case 'measurementssummary': + case 'measurementssummaryview': + case 'viewfulltable': + case 'viewfulltableview': + paginatedQuery = ` + SELECT SQL_CALC_FOUND_ROWS q.* + FROM ${schema}.${params.dataType} q + JOIN ${schema}.census c ON q.PlotID = c.PlotID AND q.CensusID = c.CensusID + WHERE q.PlotID = ? + AND c.PlotID = ? + AND c.PlotCensusNumber = ? 
+ ORDER BY q.MeasurementDate ASC LIMIT ?, ?;`; + queryParams.push(plotID, plotID, plotCensusNumber, page * pageSize, pageSize); + break; case 'subquadrats': if (!quadratID || quadratID === 0) { - throw new Error("QuadratID must be provided as part of slug fetch query, referenced fixeddata slug route"); + throw new Error('QuadratID must be provided as part of slug fetch query, referenced fixeddata slug route'); } paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS s.* - FROM ${schema}.subquadrats s - JOIN ${schema}.quadrats q ON s.QuadratID = q.QuadratID - WHERE q.QuadratID = ? - AND q.PlotID = ? - AND q.CensusID IN ( - SELECT c.CensusID - FROM ${schema}.census c - WHERE c.PlotID = q.PlotID - AND c.PlotCensusNumber = ? - ) - LIMIT ?, ?`; - queryParams.push(quadratID, plotID, plotCensusNumber, page * pageSize, pageSize); + SELECT SQL_CALC_FOUND_ROWS s.* + FROM ${schema}.subquadrats s + JOIN ${schema}.quadrats q ON s.QuadratID = q.QuadratID + JOIN ${schema}.census c ON q.CensusID = c.CensusID + WHERE q.QuadratID = ? + AND q.PlotID = ? + AND c.PlotID = ? + AND c.PlotCensusNumber = ? LIMIT ?, ?;`; + queryParams.push(quadratID, plotID, plotID, plotCensusNumber, page * pageSize, pageSize); break; case 'census': paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS * - FROM ${schema}.census - WHERE PlotID = ? - LIMIT ?, ?`; + SELECT SQL_CALC_FOUND_ROWS * + FROM ${schema}.census + WHERE PlotID = ? LIMIT ?, ?`; queryParams.push(plotID, page * pageSize, pageSize); break; case 'coremeasurements': - case 'measurementssummaryview': - case 'stemdimensionsview': // Retrieve multiple past CensusID for the given PlotCensusNumber const censusQuery = ` - SELECT CensusID - FROM ${schema}.census - WHERE PlotID = ? - AND PlotCensusNumber = ? - ORDER BY StartDate DESC - LIMIT 30 + SELECT CensusID + FROM ${schema}.census + WHERE PlotID = ? + AND PlotCensusNumber = ? + ORDER BY StartDate DESC LIMIT 30 `; const censusResults = await runQuery(conn, format(censusQuery, [plotID, plotCensusNumber])); if (censusResults.length < 2) { paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS * - FROM ${schema}.${params.dataType} - WHERE PlotID = ? - AND CensusID IN ( - SELECT c.CensusID - FROM ${schema}.census c - WHERE c.PlotID = PlotID + SELECT SQL_CALC_FOUND_ROWS pdt.* + FROM ${schema}.${params.dataType} pdt + JOIN ${schema}.census c ON pdt.CensusID = c.CensusID + WHERE c.PlotID = ? AND c.PlotCensusNumber = ? - ) - LIMIT ?, ?`; + ORDER BY pdt.MeasurementDate LIMIT ?, ?`; queryParams.push(plotID, plotCensusNumber, page * pageSize, pageSize); break; } else { updatedMeasurementsExist = true; censusIDs = censusResults.map((c: any) => c.CensusID); - mostRecentCensusID = censusIDs[0]; pastCensusIDs = censusIDs.slice(1); // Query to fetch paginated measurements from measurementssummaryview paginatedQuery = ` - SELECT SQL_CALC_FOUND_ROWS * - FROM ${schema}.measurementssummaryview - WHERE PlotID = ? - AND CensusID IN (${censusIDs.map(() => '?').join(', ')}) - LIMIT ?, ? - `; + SELECT SQL_CALC_FOUND_ROWS pdt.* + FROM ${schema}.${params.dataType} pdt + JOIN ${schema}.census c ON pdt.CensusID = c.CensusID + WHERE c.PlotID = ? 
+ AND c.CensusID IN (${censusIDs.map(() => '?').join(', ')}) + ORDER BY pdt.MeasurementDate ASC LIMIT ?, ?`; queryParams.push(plotID, ...censusIDs, page * pageSize, pageSize); break; } @@ -142,12 +175,12 @@ export async function GET(request: NextRequest, {params}: { // Ensure query parameters match the placeholders in the query if (paginatedQuery.match(/\?/g)?.length !== queryParams.length) { - throw new Error("Mismatch between query placeholders and parameters"); + throw new Error('Mismatch between query placeholders and parameters'); } const paginatedResults = await runQuery(conn, format(paginatedQuery, queryParams)); - const totalRowsQuery = "SELECT FOUND_ROWS() as totalRows"; + const totalRowsQuery = 'SELECT FOUND_ROWS() as totalRows'; const totalRowsResult = await runQuery(conn, totalRowsQuery); const totalRows = totalRowsResult[0].totalRows; @@ -157,29 +190,25 @@ export async function GET(request: NextRequest, {params}: { // Ensure deprecated measurements are duplicates const uniqueKeys = ['PlotID', 'QuadratID', 'TreeID', 'StemID']; // Define unique keys that should match - const outputKeys = paginatedResults.map((row: any) => - uniqueKeys.map((key) => row[key]).join('|') + const outputKeys = paginatedResults.map((row: any) => uniqueKeys.map(key => row[key]).join('|')); + const filteredDeprecated = deprecated.filter((row: any) => outputKeys.includes(uniqueKeys.map(key => row[key]).join('|'))); + return new NextResponse( + JSON.stringify({ + output: MapperFactory.getMapper(params.dataType).mapData(paginatedResults), + deprecated: MapperFactory.getMapper(params.dataType).mapData(filteredDeprecated), + totalCount: totalRows + }), + { status: HTTPResponses.OK } ); - const filteredDeprecated = deprecated.filter((row: any) => - outputKeys.includes(uniqueKeys.map((key) => row[key]).join('|')) - ); - // Map data using the appropriate mapper - const mapper = MapperFactory.getMapper(params.dataType); - const deprecatedRows = mapper.mapData(filteredDeprecated); - const rows = mapper.mapData(paginatedResults); - return new NextResponse(JSON.stringify({ - output: rows, - deprecated: deprecatedRows, - totalCount: totalRows - }), {status: 200}); } else { - const mapper = MapperFactory.getMapper(params.dataType); - const rows = mapper.mapData(paginatedResults); - return new NextResponse(JSON.stringify({ - output: rows, - deprecated: undefined, - totalCount: totalRows - }), {status: 200}); + return new NextResponse( + JSON.stringify({ + output: MapperFactory.getMapper(params.dataType).mapData(paginatedResults), + deprecated: undefined, + totalCount: totalRows + }), + { status: HTTPResponses.OK } + ); } } catch (error: any) { if (conn) await conn.rollback(); @@ -190,26 +219,28 @@ export async function GET(request: NextRequest, {params}: { } // required dynamic parameters: dataType (fixed),[ schema, gridID value] -> slugs -export async function POST(request: NextRequest, {params}: { params: { dataType: string, slugs?: string[] } }) { - if (!params.slugs) throw new Error("slugs not provided"); +export async function POST(request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { + if (!params.slugs) throw new Error('slugs not provided'); const [schema, gridID] = params.slugs; - if (!schema || !gridID) throw new Error("no schema or gridID provided"); + if (!schema || !gridID) throw new Error('no schema or gridID provided'); + let conn: PoolConnection | null = null; - const {newRow} = await request.json(); + const { newRow } = await request.json(); + let insertIDs: { [key: 
string]: number } = {}; + try { conn = await getConn(); await conn.beginTransaction(); + if (Object.keys(newRow).includes('isNew')) delete newRow.isNew; - const mapper = MapperFactory.getMapper(params.dataType); - const newRowData = mapper.demapData([newRow])[0]; + + const newRowData = MapperFactory.getMapper(params.dataType).demapData([newRow])[0]; const demappedGridID = gridID.charAt(0).toUpperCase() + gridID.substring(1); + // Handle SQL views with handleUpsertForSlices if (params.dataType.includes('view')) { let queryConfig; switch (params.dataType) { - case 'stemdimensionsview': - queryConfig = StemDimensionsViewQueryConfig; - break; case 'alltaxonomiesview': queryConfig = AllTaxonomiesViewQueryConfig; break; @@ -217,23 +248,32 @@ export async function POST(request: NextRequest, {params}: { params: { dataType: queryConfig = StemTaxonomiesViewQueryConfig; break; default: - throw new Error("incorrect view call"); - } - const insertQueries = generateInsertOperations(schema, newRow, queryConfig); - for (const query of insertQueries) { - await runQuery(conn, query); + throw new Error('Incorrect view call'); } - } else if (params.dataType === 'attributes') { + + // Use handleUpsertForSlices and retrieve the insert IDs + insertIDs = await handleUpsertForSlices(conn, schema, newRowData, queryConfig); + } + + // Handle the case for 'attributes' + else if (params.dataType === 'attributes') { const insertQuery = format('INSERT INTO ?? SET ?', [`${schema}.${params.dataType}`, newRowData]); - await runQuery(conn, insertQuery); - } else { + const results = await runQuery(conn, insertQuery); + insertIDs = { attributes: results.insertId }; // Standardize output with table name as key + } + + // Handle all other cases + else { delete newRowData[demappedGridID]; if (params.dataType === 'plots') delete newRowData.NumQuadrats; const insertQuery = format('INSERT INTO ?? 
SET ?', [`${schema}.${params.dataType}`, newRowData]); - await runQuery(conn, insertQuery); + const results = await runQuery(conn, insertQuery); + insertIDs = { [params.dataType]: results.insertId }; // Standardize output with table name as key } + + // Commit the transaction and return the standardized response await conn.commit(); - return NextResponse.json({message: "Insert successful"}, {status: 200}); + return NextResponse.json({ message: 'Insert successful', createdIDs: insertIDs }, { status: HTTPResponses.OK }); } catch (error: any) { return handleError(error, conn, newRow); } finally { @@ -242,29 +282,24 @@ export async function POST(request: NextRequest, {params}: { params: { dataType: } // slugs: schema, gridID -export async function PATCH(request: NextRequest, {params}: { params: { dataType: string, slugs?: string[] } }) { - if (!params.slugs) throw new Error("slugs not provided"); +export async function PATCH(request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { + if (!params.slugs) throw new Error('slugs not provided'); const [schema, gridID] = params.slugs; - if (!schema || !gridID) throw new Error("no schema or gridID provided"); + if (!schema || !gridID) throw new Error('no schema or gridID provided'); + let conn: PoolConnection | null = null; const demappedGridID = gridID.charAt(0).toUpperCase() + gridID.substring(1); - const {newRow, oldRow} = await request.json(); + const { newRow, oldRow } = await request.json(); + let updateIDs: { [key: string]: number } = {}; + try { conn = await getConn(); await conn.beginTransaction(); - if (!['alltaxonomiesview', 'stemdimensionsview', 'stemtaxonomiesview', 'measurementssummaryview'].includes(params.dataType)) { - const mapper = MapperFactory.getMapper(params.dataType); - const newRowData = mapper.demapData([newRow])[0]; - const {[demappedGridID]: gridIDKey, ...remainingProperties} = newRowData; - const updateQuery = format(`UPDATE ?? SET ? WHERE ?? = ?`, [`${schema}.${params.dataType}`, remainingProperties, demappedGridID, gridIDKey]); - await runQuery(conn, updateQuery); - await conn.commit(); - } else { + + // Handle views with handleUpsertForSlices (applies to both insert and update logic) + if (['alltaxonomiesview', 'stemtaxonomiesview'].includes(params.dataType)) { let queryConfig; switch (params.dataType) { - case 'stemdimensionsview': - queryConfig = StemDimensionsViewQueryConfig; - break; case 'alltaxonomiesview': queryConfig = AllTaxonomiesViewQueryConfig; break; @@ -272,15 +307,38 @@ export async function PATCH(request: NextRequest, {params}: { params: { dataType queryConfig = StemTaxonomiesViewQueryConfig; break; default: - throw new Error("incorrect view call"); - } - const updateQueries = generateUpdateOperations(schema, newRow, oldRow, queryConfig); - for (const query of updateQueries) { - await runQuery(conn, query); + throw new Error('Incorrect view call'); } - await conn.commit(); + + // Use handleUpsertForSlices for update operations as well (updates where needed) + updateIDs = await handleUpsertForSlices(conn, schema, newRow, queryConfig); } - return NextResponse.json({message: "Update successful"}, {status: 200}); + + // Handle non-view table updates + else { + const newRowData = MapperFactory.getMapper(params.dataType).demapData([newRow])[0]; + const { [demappedGridID]: gridIDKey, ...remainingProperties } = newRowData; + + // Construct the UPDATE query + const updateQuery = format( + `UPDATE ?? + SET ? + WHERE ?? 
= ?`, + [`${schema}.${params.dataType}`, remainingProperties, demappedGridID, gridIDKey] + ); + + // Execute the UPDATE query + await runQuery(conn, updateQuery); + + // For non-view tables, standardize the response format + updateIDs = { [params.dataType]: gridIDKey }; + } + + // Commit the transaction + await conn.commit(); + + // Return a standardized response with updated IDs + return NextResponse.json({ message: 'Update successful', updatedIDs: updateIDs }, { status: HTTPResponses.OK }); } catch (error: any) { return handleError(error, conn, newRow); } finally { @@ -288,58 +346,65 @@ export async function PATCH(request: NextRequest, {params}: { params: { dataType } } -// Define mappings for views to base tables and primary keys -const viewToTableMappings: Record = { - 'alltaxonomiesview': { table: 'species', primaryKey: 'SpeciesID' }, - 'stemdimensionsview': { table: 'stems', primaryKey: 'StemID' }, - 'stemtaxonomiesview': { table: 'stems', primaryKey: 'StemID' }, - 'measurementssummaryview': { table: 'coremeasurements', primaryKey: 'CoreMeasurementID' }, -}; - // slugs: schema, gridID // body: full data row, only need first item from it this time though -export async function DELETE(request: NextRequest, { params }: { params: { dataType: string, slugs?: string[] } }) { - if (!params.slugs) throw new Error("slugs not provided"); +export async function DELETE(request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { + if (!params.slugs) throw new Error('slugs not provided'); const [schema, gridID] = params.slugs; - if (!schema || !gridID) throw new Error("no schema or gridID provided"); + if (!schema || !gridID) throw new Error('no schema or gridID provided'); let conn: PoolConnection | null = null; const demappedGridID = gridID.charAt(0).toUpperCase() + gridID.substring(1); const { newRow } = await request.json(); + console.log('newrow: ', newRow); try { conn = await getConn(); await conn.beginTransaction(); // Handle deletion for views - if (['alltaxonomiesview', 'stemdimensionsview', 'stemtaxonomiesview', 'measurementssummaryview'].includes(params.dataType)) { - const mapper = MapperFactory.getMapper(params.dataType); - const deleteRowData = mapper.demapData([newRow])[0]; - const viewConfig = viewToTableMappings[params.dataType]; - if (!viewConfig) throw new Error(`No table mapping found for view ${params.dataType}`); + if (['alltaxonomiesview', 'stemtaxonomiesview', 'measurementssummaryview'].includes(params.dataType)) { + const deleteRowData = MapperFactory.getMapper(params.dataType).demapData([newRow])[0]; - const { [viewConfig.primaryKey]: primaryKeyValue } = deleteRowData; - if (!primaryKeyValue) throw new Error(`Primary key value missing for ${viewConfig.primaryKey} in view ${params.dataType}`); + // Prepare query configuration based on view + let queryConfig; + switch (params.dataType) { + case 'alltaxonomiesview': + queryConfig = AllTaxonomiesViewQueryConfig; + break; + case 'stemtaxonomiesview': + queryConfig = StemTaxonomiesViewQueryConfig; + break; + default: + throw new Error('Incorrect view call'); + } + + // Use handleDeleteForSlices for handling deletion, taking foreign key constraints into account + await handleDeleteForSlices(conn, schema, deleteRowData, queryConfig); - const deleteQuery = format(`DELETE FROM ?? WHERE ?? 
= ?`, [`${schema}.${viewConfig.table}`, viewConfig.primaryKey, primaryKeyValue]); - await runQuery(conn, deleteQuery); await conn.commit(); - return NextResponse.json({ message: "Delete successful" }, { status: 200 }); + return NextResponse.json({ message: 'Delete successful' }, { status: HTTPResponses.OK }); } + // Handle deletion for tables - const mapper = MapperFactory.getMapper(params.dataType); - const deleteRowData = mapper.demapData([newRow])[0]; - const { [demappedGridID]: gridIDKey, ...remainingProperties } = deleteRowData; + const deleteRowData = MapperFactory.getMapper(params.dataType).demapData([newRow])[0]; + const { [demappedGridID]: gridIDKey } = deleteRowData; const deleteQuery = format(`DELETE FROM ?? WHERE ?? = ?`, [`${schema}.${params.dataType}`, demappedGridID, gridIDKey]); await runQuery(conn, deleteQuery); await conn.commit(); - return NextResponse.json({ message: "Delete successful" }, { status: 200 }); + return NextResponse.json({ message: 'Delete successful' }, { status: HTTPResponses.OK }); } catch (error: any) { if (error.code === 'ER_ROW_IS_REFERENCED_2') { const referencingTableMatch = error.message.match(/CONSTRAINT `(.*?)` FOREIGN KEY \(`(.*?)`\) REFERENCES `(.*?)`/); const referencingTable = referencingTableMatch ? referencingTableMatch[3] : 'unknown'; - return NextResponse.json({ message: "Foreign key conflict detected", referencingTable }, { status: HTTPResponses.FOREIGN_KEY_CONFLICT }); + return NextResponse.json( + { + message: 'Foreign key conflict detected', + referencingTable + }, + { status: HTTPResponses.FOREIGN_KEY_CONFLICT } + ); } return handleError(error, conn, newRow); } finally { if (conn) conn.release(); } -} \ No newline at end of file +} diff --git a/frontend/app/api/formsearch/attributes/route.ts b/frontend/app/api/formsearch/attributes/route.ts index 87121c98..e4801463 100644 --- a/frontend/app/api/formsearch/attributes/route.ts +++ b/frontend/app/api/formsearch/attributes/route.ts @@ -1,20 +1,22 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialCode = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialCode === '' ? - `SELECT DISTINCT Code FROM ${schema}.attributes ORDER BY Code LIMIT ${FORMSEARCH_LIMIT}` : - `SELECT DISTINCT Code FROM ${schema}.attributes WHERE Code LIKE ? ORDER BY Code LIMIT ${FORMSEARCH_LIMIT}`; + const query = + partialCode === '' + ? `SELECT DISTINCT Code FROM ${schema}.attributes ORDER BY Code LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT DISTINCT Code FROM ${schema}.attributes WHERE Code LIKE ? ORDER BY Code LIMIT ${FORMSEARCH_LIMIT}`; const queryParams = partialCode === '' ? 
[] : [`%${partialCode}%`]; const results = await runQuery(conn, query, queryParams); - return new NextResponse(JSON.stringify(results.map((row: any) => row.Code)), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => row.Code)), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Attributes:', error.message || error); throw new Error('Failed to fetch attribute data'); diff --git a/frontend/app/api/formsearch/personnel/route.ts b/frontend/app/api/formsearch/personnel/route.ts index 5b2ef6ee..fe91e7aa 100644 --- a/frontend/app/api/formsearch/personnel/route.ts +++ b/frontend/app/api/formsearch/personnel/route.ts @@ -1,19 +1,21 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialLastName = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialLastName === '' ? - `SELECT FirstName, LastName + const query = + partialLastName === '' + ? `SELECT FirstName, LastName FROM ${schema}.personnel ORDER BY LastName - LIMIT ${FORMSEARCH_LIMIT}` : - `SELECT FirstName, LastName + LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT FirstName, LastName FROM ${schema}.personnel WHERE LastName LIKE ? 
ORDER BY LastName @@ -22,7 +24,7 @@ export async function GET(request: NextRequest): Promise> const results = await runQuery(conn, query, queryParams); // Properly mapping results to return an array of { label, code } - return new NextResponse(JSON.stringify(results.map((row: any) => `${row.FirstName} ${row.LastName}`)), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => `${row.FirstName} ${row.LastName}`)), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Personnel:', error.message || error); throw new Error('Failed to fetch personnel data'); diff --git a/frontend/app/api/formsearch/personnelblock/route.ts b/frontend/app/api/formsearch/personnelblock/route.ts index 3e5df2be..192ef806 100644 --- a/frontend/app/api/formsearch/personnelblock/route.ts +++ b/frontend/app/api/formsearch/personnelblock/route.ts @@ -1,8 +1,9 @@ -import {NextRequest, NextResponse} from "next/server"; -import {PoolConnection} from "mysql2/promise"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PersonnelRDS, PersonnelResult} from '@/config/sqlrdsdefinitions/tables/personnelrds'; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { PoolConnection } from 'mysql2/promise'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; +import { PersonnelRDS, PersonnelResult } from '@/config/sqlrdsdefinitions/personnel'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); @@ -10,12 +11,13 @@ export async function GET(request: NextRequest): Promise { let conn: PoolConnection | null = null; const schema = request.nextUrl.searchParams.get('schema'); const quadratID = parseInt(request.nextUrl.searchParams.get('quadratID')!, 10); - if ((!schema || schema === 'undefined') || isNaN(quadratID)) throw new Error('Missing required parameters'); + if (!schema || schema === 'undefined' || isNaN(quadratID)) throw new Error('Missing required parameters'); try { const updatedPersonnelIDs: number[] = await request.json(); @@ -76,11 +79,11 @@ export async function PUT(request: NextRequest): Promise { // Commit the transaction await conn.commit(); - return NextResponse.json({message: "Personnel updated successfully"}, {status: 200}); + return NextResponse.json({ message: 'Personnel updated successfully' }, { status: HTTPResponses.OK }); } catch (error) { await conn?.rollback(); console.error('Error:', error); - throw new Error("Personnel update failed"); + throw new Error('Personnel update failed'); } finally { if (conn) conn.release(); } diff --git a/frontend/app/api/formsearch/quadrats/route.ts b/frontend/app/api/formsearch/quadrats/route.ts index 5d6d594f..fc1fbc91 100644 --- a/frontend/app/api/formsearch/quadrats/route.ts +++ b/frontend/app/api/formsearch/quadrats/route.ts @@ -1,26 +1,28 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function 
GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialQuadratName = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialQuadratName === '' ? - `SELECT QuadratName + const query = + partialQuadratName === '' + ? `SELECT QuadratName FROM ${schema}.quadrats ORDER BY QuadratName - LIMIT ${FORMSEARCH_LIMIT}` : - `SELECT QuadratName + LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT QuadratName FROM ${schema}.quadrats WHERE QuadratName LIKE ? ORDER BY QuadratName LIMIT ${FORMSEARCH_LIMIT}`; const queryParams = partialQuadratName === '' ? [] : [`%${partialQuadratName}%`]; const results = await runQuery(conn, query, queryParams); - return new NextResponse(JSON.stringify(results.map((row: any) => row.QuadratName)), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => row.QuadratName)), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Quadrats:', error.message || error); throw new Error('Failed to fetch quadrat data'); diff --git a/frontend/app/api/formsearch/species/route.ts b/frontend/app/api/formsearch/species/route.ts index 46f9b883..55189288 100644 --- a/frontend/app/api/formsearch/species/route.ts +++ b/frontend/app/api/formsearch/species/route.ts @@ -1,24 +1,28 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialSpeciesCode = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialSpeciesCode === '' ? `SELECT SpeciesCode + const query = + partialSpeciesCode === '' + ? `SELECT SpeciesCode FROM ${schema}.species ORDER BY SpeciesCode - LIMIT ${FORMSEARCH_LIMIT}` : `SELECT SpeciesCode + LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT SpeciesCode FROM ${schema}.species WHERE SpeciesCode LIKE ? ORDER BY SpeciesCode LIMIT ${FORMSEARCH_LIMIT}`; const queryParams = partialSpeciesCode === '' ? 
[] : [`%${partialSpeciesCode}%`]; const results = await runQuery(conn, query, queryParams); - return new NextResponse(JSON.stringify(results.map((row: any) => row.SpeciesCode)), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => row.SpeciesCode)), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Quadrats:', error.message || error); throw new Error('Failed to fetch quadrat data'); diff --git a/frontend/app/api/formsearch/stems/route.ts b/frontend/app/api/formsearch/stems/route.ts index 013c7507..4e81f4cf 100644 --- a/frontend/app/api/formsearch/stems/route.ts +++ b/frontend/app/api/formsearch/stems/route.ts @@ -1,26 +1,28 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialStemTag = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialStemTag === '' ? - `SELECT StemTag + const query = + partialStemTag === '' + ? `SELECT StemTag FROM ${schema}.stems ORDER BY StemTag - LIMIT ${FORMSEARCH_LIMIT}` : - `SELECT StemTag + LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT StemTag FROM ${schema}.stems WHERE StemTag LIKE ? ORDER BY StemTag LIMIT ${FORMSEARCH_LIMIT}`; const queryParams = partialStemTag === '' ? [] : [`%${partialStemTag}%`]; const results = await runQuery(conn, query, queryParams); - return new NextResponse(JSON.stringify(results.map((row: any) => row.StemTag ? row.StemTag : '')), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => (row.StemTag ? row.StemTag : ''))), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Quadrats:', error.message || error); throw new Error('Failed to fetch quadrat data'); diff --git a/frontend/app/api/formsearch/trees/route.ts b/frontend/app/api/formsearch/trees/route.ts index 318f8fea..95eea4f4 100644 --- a/frontend/app/api/formsearch/trees/route.ts +++ b/frontend/app/api/formsearch/trees/route.ts @@ -1,26 +1,28 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {FORMSEARCH_LIMIT} from "@/config/macros/azurestorage"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { FORMSEARCH_LIMIT } from '@/config/macros/azurestorage'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest): Promise> { const schema = request.nextUrl.searchParams.get('schema'); - if ((!schema || schema === 'undefined')) throw new Error('no schema provided!'); + if (!schema || schema === 'undefined') throw new Error('no schema provided!'); const partialTreeTag = request.nextUrl.searchParams.get('searchfor')!; const conn = await getConn(); try { - const query = partialTreeTag === '' ? 
- `SELECT TreeTag + const query = + partialTreeTag === '' + ? `SELECT TreeTag FROM ${schema}.trees ORDER BY TreeTag - LIMIT ${FORMSEARCH_LIMIT}` : - `SELECT TreeTag + LIMIT ${FORMSEARCH_LIMIT}` + : `SELECT TreeTag FROM ${schema}.trees WHERE TreeTag LIKE ? ORDER BY TreeTag LIMIT ${FORMSEARCH_LIMIT}`; const queryParams = partialTreeTag === '' ? [] : [`%${partialTreeTag}%`]; const results = await runQuery(conn, query, queryParams); - return new NextResponse(JSON.stringify(results.map((row: any) => row.TreeTag)), {status: 200}); + return new NextResponse(JSON.stringify(results.map((row: any) => row.TreeTag)), { status: HTTPResponses.OK }); } catch (error: any) { console.error('Error in GET Quadrats:', error.message || error); throw new Error('Failed to fetch quadrat data'); diff --git a/frontend/app/api/formvalidation/[dataType]/[[...slugs]]/route.ts b/frontend/app/api/formvalidation/[dataType]/[[...slugs]]/route.ts index e914102d..f9122cd0 100644 --- a/frontend/app/api/formvalidation/[dataType]/[[...slugs]]/route.ts +++ b/frontend/app/api/formvalidation/[dataType]/[[...slugs]]/route.ts @@ -1,19 +1,20 @@ -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PoolConnection, format} from "mysql2/promise"; -import {NextRequest, NextResponse} from "next/server"; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import { format, PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; // dataType // slugs: schema, columnName, value ONLY // needs to match dynamic format established by other slug routes! // refit to match entire rows, using dataType convention to determine what columns need testing? -export async function GET(request: NextRequest, {params}: { params: { dataType: string, slugs?: string[] } }) { +export async function GET(request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { // simple dynamic validation to confirm table input values: - if (!params.slugs || params.slugs.length !== 3) throw new Error("slugs missing -- formvalidation"); - if (!params.dataType || params.dataType === 'undefined') throw new Error("no schema provided"); + if (!params.slugs || params.slugs.length !== 3) throw new Error('slugs missing -- formvalidation'); + if (!params.dataType || params.dataType === 'undefined') throw new Error('no schema provided'); const [schema, columnName, value] = params.slugs; - if (!schema || !columnName || !value) return new NextResponse(null, {status: 404}); + if (!schema || !columnName || !value) return new NextResponse(null, { status: 404 }); let conn: PoolConnection | null = null; try { @@ -21,12 +22,12 @@ export async function GET(request: NextRequest, {params}: { params: { dataType: const query = `SELECT 1 FROM ?? WHERE ?? = ? 
LIMIT 1`; const formatted = format(query, [`${schema}.${params.dataType}`, columnName, value]); const results = await runQuery(conn, formatted); - if (results.length === 0) return new NextResponse(null, {status: 404}); - return new NextResponse(null, {status: 200}); + if (results.length === 0) return new NextResponse(null, { status: 404 }); + return new NextResponse(null, { status: HTTPResponses.OK }); } catch (error: any) { console.error(error); throw error; } finally { if (conn) conn.release(); } -} \ No newline at end of file +} diff --git a/frontend/app/api/hash/census/route.ts b/frontend/app/api/hash/census/route.ts deleted file mode 100644 index c7d090f3..00000000 --- a/frontend/app/api/hash/census/route.ts +++ /dev/null @@ -1,23 +0,0 @@ -import {NextRequest, NextResponse} from "next/server"; -import {PoolConnection} from "mysql2/promise"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {generateHash} from "@/config/crypto-actions"; - -export async function GET(request: NextRequest) { - const schema = request.nextUrl.searchParams.get('schema'); - if (!schema) throw new Error('no schema provided!'); - let conn: PoolConnection | null = null; - try { - conn = await getConn();// Utilize the retry mechanism effectively - - const results = await runQuery(conn, `SELECT * FROM ${schema}.census`); - if (!results) throw new Error("Call failed"); - - return new NextResponse(JSON.stringify(generateHash(results)), {status: 200}); - } catch (error) { - console.error('Error in GET:', error); - throw new Error('Failed to fetch census data'); // Providing a more user-friendly error message - } finally { - if (conn) conn.release(); // Release the connection - } -} \ No newline at end of file diff --git a/frontend/app/api/hash/plots/route.ts b/frontend/app/api/hash/plots/route.ts deleted file mode 100644 index dbbe0d13..00000000 --- a/frontend/app/api/hash/plots/route.ts +++ /dev/null @@ -1,25 +0,0 @@ -import {NextRequest, NextResponse} from "next/server"; -import {PoolConnection} from "mysql2/promise"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {generateHash} from "@/config/crypto-actions"; - -export async function GET(request: NextRequest) { - let conn: PoolConnection | null = null; - const schema = request.nextUrl.searchParams.get('schema'); - if (!schema) throw new Error('no schema provided!'); - try { - conn = await getConn(); - // Query to get plots - const plotQuery = ` - SELECT * FROM ${schema}.plots - `; - const plotResults = await runQuery(conn, plotQuery, []); - - return new NextResponse(JSON.stringify(generateHash(plotResults)), {status: 200}); - } catch (error) { - console.error('Error in GET:', error); - throw new Error('Failed to fetch plot data'); - } finally { - if (conn) conn.release(); - } -} \ No newline at end of file diff --git a/frontend/app/api/hash/quadrats/route.ts b/frontend/app/api/hash/quadrats/route.ts deleted file mode 100644 index 50976cf2..00000000 --- a/frontend/app/api/hash/quadrats/route.ts +++ /dev/null @@ -1,21 +0,0 @@ -import {NextRequest, NextResponse} from "next/server"; -import {PoolConnection} from "mysql2/promise"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {generateHash} from "@/config/crypto-actions"; - -export async function GET(request: NextRequest) { - const schema = request.nextUrl.searchParams.get('schema'); - if (!schema) throw new Error('no schema provided!'); - let conn: PoolConnection | null = null; - try { - conn = await getConn(); 
- const query = `SELECT * FROM ${schema}.quadrats`; - const results = await runQuery(conn, query); - return new NextResponse(JSON.stringify(generateHash(results)), {status: 200}); - } catch (error) { - console.error('Error:', error); - throw new Error("Call failed"); - } finally { - if (conn) conn.release(); - } -} \ No newline at end of file diff --git a/frontend/app/api/postvalidation/route.ts b/frontend/app/api/postvalidation/route.ts new file mode 100644 index 00000000..0df42b31 --- /dev/null +++ b/frontend/app/api/postvalidation/route.ts @@ -0,0 +1,287 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; + +export async function GET(request: NextRequest) { + const schema = request.nextUrl.searchParams.get('schema'); + if (!schema) throw new Error('no schema variable provided!'); + + const conn = await getConn(); + const query = `SELECT QueryID, QueryName, Description FROM ${schema}.postvalidationqueries WHERE IsEnabled IS TRUE;`; + const results = await runQuery(conn, query); + if (results.length === 0) { + return new NextResponse(JSON.stringify({ message: 'No queries found' }), { + status: HTTPResponses.NOT_FOUND + }); + } + const postValidations = results.map((row: any) => ({ + queryID: row.QueryID, + queryName: row.QueryName, + queryDescription: row.Description + })); + return new NextResponse(JSON.stringify(postValidations), { + status: HTTPResponses.OK + }); +} + +// searchParams: schema, plot, census +// export async function GET(request: NextRequest) { +// const schema = request.nextUrl.searchParams.get('schema'); +// if (!schema) throw new Error('no schema variable provided!'); +// const currentPlotParam = request.nextUrl.searchParams.get('plot'); +// if (!currentPlotParam) throw new Error('no current PlotParam'); +// const currentCensusParam = request.nextUrl.searchParams.get('census'); +// if (!currentCensusParam) throw new Error('no current CensusParam'); +// const currentPlotID = parseInt(currentPlotParam); +// const currentCensusID = parseInt(currentCensusParam); +// const queries = { +// numRecordsByQuadrat: `SELECT q.QuadratID, COUNT(cm.CoreMeasurementID) AS MeasurementCount +// FROM ${schema}.coremeasurements cm +// JOIN ${schema}.quadrats q ON q.CensusID = cm.CensusID +// WHERE cm.CensusID = ${currentCensusID} AND q.PlotID = ${currentPlotID} +// GROUP BY QuadratID;`, +// allStemRecords: `SELECT COUNT(s.StemID) AS TotalStems +// FROM ${schema}.stems s +// JOIN ${schema}.cmattributes cma ON s.StemID = cma.CoreMeasurementID +// JOIN ${schema}.attributes a ON cma.Code = a.Code +// JOIN ${schema}.quadrats q ON q.QuadratID = s.QuadratID +// WHERE q.CensusID = ${currentCensusID} AND q.PlotID = ${currentPlotID};`, +// liveStemRecords: `SELECT COUNT(s.StemID) AS LiveStems +// FROM ${schema}.stems s +// JOIN ${schema}.cmattributes cma ON s.StemID = cma.CoreMeasurementID +// JOIN ${schema}.attributes a ON cma.Code = a.Code +// JOIN ${schema}.quadrats q ON q.QuadratID = s.QuadratID +// WHERE a.Status = 'alive' +// AND q.CensusID = ${currentCensusID} AND q.PlotID = ${currentPlotID};`, +// treeCount: `SELECT COUNT(t.TreeID) AS TotalTrees +// FROM ${schema}.trees t +// JOIN ${schema}.stems s ON s.TreeID = t.TreeID +// JOIN ${schema}.quadrats q ON q.QuadratID = s.QuadratID +// WHERE q.CensusID = ${currentCensusID} AND q.PlotID = ${currentPlotID};`, +// countNumDeadMissingByCensus: `SELECT cm.CensusID, COUNT(s.StemID) AS DeadOrMissingStems +// FROM 
${schema}.stems s +// JOIN ${schema}.cmattributes cma ON s.StemID = cma.CoreMeasurementID +// JOIN ${schema}.attributes a ON cma.Code = a.Code +// JOIN ${schema}.coremeasurements cm ON s.StemID = cm.StemID +// WHERE a.Status IN ('dead', 'missing') +// GROUP BY cm.CensusID;`, +// treesOutsideLimits: `SELECT t.TreeID, s.LocalX, s.LocalY, p.DimensionX, p.DimensionY +// FROM ${schema}.trees t +// JOIN ${schema}.stems s ON t.TreeID = s.TreeID +// JOIN ${schema}.quadrats q ON s.QuadratID = q.QuadratID +// JOIN ${schema}.plots p ON q.PlotID = p.PlotID +// WHERE s.LocalX IS NULL +// OR s.LocalY IS NULL +// OR s.LocalX > p.DimensionX +// OR s.LocalY > p.DimensionY +// AND p.PlotID = ${currentPlotID};`, +// largestDBHHOMBySpecies: `SELECT sp.SpeciesID, sp.SpeciesName, MAX(cm.MeasuredDBH) AS LargestDBH, MAX(cm.MeasuredHOM) AS LargestHOM +// FROM ${schema}.species sp +// JOIN ${schema}.trees t ON sp.SpeciesID = t.SpeciesID +// JOIN ${schema}.stems s ON s.TreeID = t.TreeID +// JOIN ${schema}.coremeasurements cm ON cm.StemID = s.StemID +// GROUP BY sp.SpeciesID, sp.SpeciesName;`, +// allTreesFromLastCensusPresent: `SELECT t.TreeID, +// t.TreeTag, +// t.SpeciesID +// FROM ${schema}.trees t +// JOIN +// ${schema}.stems s_last ON t.TreeID = s_last.TreeID +// JOIN +// ${schema}.coremeasurements cm_last ON s_last.StemID = cm_last.StemID +// WHERE cm_last.CensusID = ${currentCensusID} - 1 +// AND NOT EXISTS (SELECT 1 +// FROM ${schema}.stems s_current +// JOIN +// ${schema}.coremeasurements cm_current ON s_current.StemID = cm_current.StemID +// WHERE t.TreeID = s_current.TreeID +// AND cm_current.CensusID = ${currentCensusID}) +// GROUP BY t.TreeID, t.TreeTag, t.SpeciesID;`, +// numNewStemsPerQuadratPerCensus: `SELECT q.QuadratName, +// s_current.StemID, +// s_current.StemTag, +// s_current.TreeID, +// s_current.QuadratID, +// s_current.LocalX, +// s_current.LocalY, +// s_current.CoordinateUnits +// FROM ${schema}.quadrats q +// JOIN +// ${schema}.stems s_current ON q.QuadratID = s_current.QuadratID +// JOIN +// ${schema}.coremeasurements cm_current ON s_current.StemID = cm_current.StemID +// WHERE cm_current.CensusID = ${currentCensusID} +// AND NOT EXISTS (SELECT 1 +// FROM ${schema}.stems s_last +// JOIN +// ${schema}.coremeasurements cm_last ON s_last.StemID = cm_last.StemID +// WHERE s_current.StemID = s_last.StemID +// AND cm_last.CensusID = ${currentCensusID} - 1) +// ORDER BY q.QuadratName, s_current.StemID;`, +// numNewStemsMinMaxByQuadratPerCensus: `WITH NewStems AS (SELECT s_current.QuadratID, +// s_current.StemID +// FROM ${schema}.stems s_current +// JOIN +// ${schema}.coremeasurements cm_current ON s_current.StemID = cm_current.StemID +// WHERE cm_current.CensusID = ${currentCensusID} +// AND NOT EXISTS (SELECT 1 +// FROM ${schema}.stems s_last +// JOIN +// ${schema}.coremeasurements cm_last ON s_last.StemID = cm_last.StemID +// WHERE s_current.StemID = s_last.StemID +// AND cm_last.CensusID = ${currentCensusID} - 1)), +// NewStemCounts AS (SELECT q.QuadratID, +// q.QuadratName, +// COUNT(ns.StemID) AS NewStemCount +// FROM ${schema}.quadrats q +// LEFT JOIN +// NewStems ns ON q.QuadratID = ns.QuadratID +// GROUP BY q.QuadratID, q.QuadratName), +// LeastNewStems AS (SELECT 'Least New Stems' AS StemType, +// QuadratName, +// NewStemCount +// FROM NewStemCounts +// ORDER BY NewStemCount, QuadratName +// LIMIT 1), +// MostNewStems AS (SELECT 'Most New Stems' AS StemType, +// QuadratName, +// NewStemCount +// FROM NewStemCounts +// ORDER BY NewStemCount DESC, QuadratName DESC +// LIMIT 1) +// SELECT 
* +// FROM LeastNewStems +// UNION ALL +// SELECT * +// FROM MostNewStems;`, +// numDeadStemsPerQuadratPerCensus: `SELECT q.QuadratName, +// s.StemID, +// s.StemTag, +// s.TreeID, +// s.QuadratID, +// s.LocalX, +// s.LocalY, +// s.CoordinateUnits, +// a.Code AS AttributeCode, +// a.Description AS AttributeDescription, +// a.Status AS AttributeStatus +// FROM ${schema}.quadrats q +// JOIN +// ${schema}.stems s ON q.QuadratID = s.QuadratID +// JOIN +// ${schema}.coremeasurements cm ON s.StemID = cm.StemID +// JOIN +// ${schema}.cmattributes cma ON cm.CoreMeasurementID = cma.CoreMeasurementID +// JOIN +// ${schema}.attributes a ON cma.Code = a.Code +// WHERE cm.CensusID = ${currentCensusID} +// AND a.Status = 'dead' +// ORDER BY q.QuadratName, s.StemID;`, +// numDeadStemsPerSpeciesPerCensus: `SELECT sp.SpeciesName, +// sp.SpeciesCode, +// s.StemID, +// s.StemTag, +// s.TreeID, +// s.QuadratID, +// s.LocalX, +// s.LocalY, +// s.CoordinateUnits, +// a.Code AS AttributeCode, +// a.Description AS AttributeDescription, +// a.Status AS AttributeStatus +// FROM ${schema}.stems s +// JOIN +// ${schema}.coremeasurements cm ON s.StemID = cm.StemID +// JOIN +// ${schema}.cmattributes cma ON cm.CoreMeasurementID = cma.CoreMeasurementID +// JOIN +// ${schema}.attributes a ON cma.Code = a.Code +// JOIN +// ${schema}.trees t ON s.TreeID = t.TreeID +// JOIN +// ${schema}.species sp ON t.SpeciesID = sp.SpeciesID +// WHERE cm.CensusID = @currentCensusID +// AND a.Status = 'dead' +// ORDER BY sp.SpeciesName, s.StemID;` +// }; +// +// let conn: PoolConnection | null = null; +// try { +// conn = await getConn(); +// const results = await Promise.all([ +// runQuery(conn, queries.numRecordsByQuadrat), +// runQuery(conn, queries.allStemRecords), +// runQuery(conn, queries.liveStemRecords), +// runQuery(conn, queries.treeCount), +// runQuery(conn, queries.countNumDeadMissingByCensus), +// runQuery(conn, queries.treesOutsideLimits), +// runQuery(conn, queries.largestDBHHOMBySpecies), +// runQuery(conn, queries.allTreesFromLastCensusPresent), +// runQuery(conn, queries.numNewStemsPerQuadratPerCensus), +// runQuery(conn, queries.numNewStemsMinMaxByQuadratPerCensus), +// runQuery(conn, queries.numDeadStemsPerQuadratPerCensus), +// runQuery(conn, queries.numDeadStemsPerSpeciesPerCensus) +// ]); +// +// const totalMeasurementCount = results[0].reduce((sum: number, record: { MeasurementCount: number }) => sum + record.MeasurementCount, 0); +// +// const response = { +// numRecordsByQuadrat: { +// totalMeasurementCount, +// data: results[0] +// }, +// allStemRecords: { +// count: results[1].length, +// data: results[1] +// }, +// liveStemRecords: { +// count: results[2].length, +// data: results[2] +// }, +// treeCount: { +// count: results[3].length, +// data: results[3] +// }, +// countNumDeadMissingByCensus: { +// count: results[4].length, +// data: results[4] +// }, +// treesOutsideLimits: { +// count: results[5].length, +// data: results[5] +// }, +// largestDBHHOMBySpecies: { +// count: results[6].length, +// data: results[6] +// }, +// allTreesFromLastCensusPresent: { +// count: results[7].length, +// data: results[7] +// }, +// numNewStemsPerQuadratPerCensus: { +// count: results[8].length, +// data: results[8] +// }, +// numNewStemsMinMaxByQuadratPerCensus: { +// count: results[9].length, +// data: results[9] +// }, +// numDeadStemsPerQuadratPerCensus: { +// count: results[10].length, +// data: results[10] +// }, +// numDeadStemsPerSpeciesPerCensus: { +// count: results[11].length, +// data: results[11] +// } +// }; 
+// +// return new NextResponse(JSON.stringify(response), { +// status: HTTPResponses.OK +// }); +// } catch (error: any) { +// throw new Error('Post-Summary Census Staistics: SQL query failed: ' + error.message); +// } finally { +// if (conn) conn.release(); +// } +// } diff --git a/frontend/app/api/postvalidationbyquery/[schema]/[plotID]/[censusID]/[queryID]/route.ts b/frontend/app/api/postvalidationbyquery/[schema]/[plotID]/[censusID]/[queryID]/route.ts new file mode 100644 index 00000000..90a100dc --- /dev/null +++ b/frontend/app/api/postvalidationbyquery/[schema]/[plotID]/[censusID]/[queryID]/route.ts @@ -0,0 +1,36 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { HTTPResponses } from '@/config/macros'; +import { getConn, runQuery } from '@/components/processors/processormacros'; + +export async function GET(_request: NextRequest, { params }: { params: { schema: string; plotID: string; censusID: string; queryID: string } }) { + const { schema } = params; + const plotID = parseInt(params.plotID); + const censusID = parseInt(params.censusID); + const queryID = parseInt(params.queryID); + if (!schema || !plotID || !censusID || !queryID) { + return new NextResponse('Missing parameters', { status: HTTPResponses.INVALID_REQUEST }); + } + const conn = await getConn(); + const query = `SELECT QueryDefinition FROM ${schema}.postvalidationqueries WHERE QueryID = ${queryID}`; + const results = await runQuery(conn, query); + if (results.length === 0) { + return new NextResponse('Query not found', { status: HTTPResponses.NOT_FOUND }); + } + const replacements = { + schema: schema, + currentPlotID: plotID, + currentCensusID: censusID + }; + const formattedQuery = results[0].QueryDefinition.replace(/\${(.*?)}/g, (_match: any, p1: string) => replacements[p1 as keyof typeof replacements]); + const queryResults = await runQuery(conn, formattedQuery); + if (queryResults.length === 0) { + return new NextResponse('Query returned no results', { status: HTTPResponses.NOT_FOUND }); + } + return new NextResponse( + JSON.stringify({ + count: queryResults.length, + data: queryResults + }), + { status: HTTPResponses.OK } + ); +} diff --git a/frontend/app/api/refreshviews/[view]/[schema]/route.ts b/frontend/app/api/refreshviews/[view]/[schema]/route.ts new file mode 100644 index 00000000..24879ff2 --- /dev/null +++ b/frontend/app/api/refreshviews/[view]/[schema]/route.ts @@ -0,0 +1,21 @@ +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import { PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; + +export async function POST(_request: NextRequest, { params }: { params: { view: string; schema: string } }) { + if (!params.schema || params.schema === 'undefined' || !params.view || params.view === 'undefined' || !params) throw new Error('schema not provided'); + const { view, schema } = params; + let connection: PoolConnection | null = null; + try { + connection = await getConn(); + const query = `CALL ${schema}.Refresh${view === 'viewfulltable' ? 'ViewFullTable' : view === 'measurementssummary' ? 
'MeasurementsSummary' : ''}();`; + await runQuery(connection, query); + return new NextResponse(null, { status: HTTPResponses.OK }); + } catch (e: any) { + console.error('Error:', e); + throw new Error('Call failed: ', e); + } finally { + if (connection) connection.release(); + } +} diff --git a/frontend/app/api/rollover/[dataType]/[[...slugs]]/route.ts b/frontend/app/api/rollover/[dataType]/[[...slugs]]/route.ts new file mode 100644 index 00000000..4737575d --- /dev/null +++ b/frontend/app/api/rollover/[dataType]/[[...slugs]]/route.ts @@ -0,0 +1,91 @@ +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import { PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; + +/** + * Handles the POST request for the rollover API endpoint, which allows users to roll over quadrat or personnel data from one census to another within a specified schema. + * + * @param request - The NextRequest object containing the incoming request data. + * @param params - The route parameters, including the `dataType` (either 'quadrats' or 'personnel') and the `slugs` (an array containing the schema, plotID, sourceCensusID, and newCensusID). + * @returns A NextResponse with a success message or an error message, along with the appropriate HTTP status code. + */ +export async function POST(request: NextRequest, { params }: { params: { dataType: string; slugs?: string[] } }) { + if (!params.slugs) throw new Error('slugs not provided'); + const [schema, plotID, sourceCensusID, newCensusID] = params.slugs; + if (!schema || !plotID || !sourceCensusID || !newCensusID) throw new Error('no schema or plotID or censusID provided'); + + let conn: PoolConnection | null = null; + try { + const { incoming } = await request.json(); + if (!Array.isArray(incoming) || incoming.length === 0) throw new Error('No quadrat or personnel IDs provided'); + + conn = await getConn(); + if (conn) console.log('connection created.'); + + let query = ``; + let queryParams = []; + + await conn.beginTransaction(); + console.log('transaction started.'); + + switch (params.dataType) { + case 'quadrats': + query = ` + INSERT INTO ${schema}.quadrats (PlotID, CensusID, QuadratName, StartX, StartY, CoordinateUnits, DimensionX, DimensionY, DimensionUnits, Area, AreaUnits, QuadratShape) + SELECT + PlotID, + ?, + QuadratName, + StartX, + StartY, + CoordinateUnits, + DimensionX, + DimensionY, + DimensionUnits, + Area, + AreaUnits, + QuadratShape + FROM ${schema}.quadrats + WHERE CensusID = ? AND QuadratID IN (${incoming.map(() => '?').join(', ')});`; + queryParams = [Number(newCensusID), Number(sourceCensusID), ...incoming]; + await runQuery(conn, query, queryParams); + break; + case 'personnel': + query = ` + INSERT INTO ${schema}.personnel (CensusID, FirstName, LastName, RoleID) + SELECT + ?, + FirstName, + LastName, + RoleID + FROM ${schema}.personnel + WHERE CensusID = ? 
AND PersonnelID IN (${incoming.map(() => '?').join(', ')});`; + queryParams = [Number(newCensusID), Number(sourceCensusID), ...incoming]; + await runQuery(conn, query, queryParams); + break; + default: + throw new Error('Invalid data type'); + } + await conn.commit(); // testing + return new NextResponse(JSON.stringify({ message: 'Rollover successful' }), { status: HTTPResponses.OK }); + } catch (error: any) { + await conn?.rollback(); + console.error('Error in rollover API:', error.message); + return new NextResponse(JSON.stringify({ error: error.message }), { + status: 500 + }); + } finally { + if (conn) conn.release(); + } +} + +/** + * Handles the POST request for the rollover API endpoint, which allows users to rollover quadrat or personnel data from one census to another within a given schema. + * + * The slugs provided in the URL MUST include (in order): a schema, plotID, source censusID, and new censusID to target. + * + * @param request - The NextRequest object containing the request data. + * @param params - The URL parameters, including the dataType, schema, plotID, source censusID, and new censusID. + * @returns A NextResponse with a success message or an error message. + */ diff --git a/frontend/app/api/specieslimits/[speciesID]/route.ts b/frontend/app/api/specieslimits/[speciesID]/route.ts new file mode 100644 index 00000000..561acb64 --- /dev/null +++ b/frontend/app/api/specieslimits/[speciesID]/route.ts @@ -0,0 +1,44 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { PoolConnection } from 'mysql2/promise'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import MapperFactory from '@/config/datamapper'; +import { HTTPResponses } from '@/config/macros'; + +export async function GET(request: NextRequest, { params }: { params: { speciesID: string } }) { + const schema = request.nextUrl.searchParams.get('schema'); + if (!schema) throw new Error('Schema not provided'); + if (params.speciesID === 'undefined') throw new Error('SpeciesID not provided'); + + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + const query = `SELECT * FROM ${schema}.specieslimits WHERE SpeciesID = ?`; + const results = await runQuery(conn, query, [params.speciesID]); + return new NextResponse(JSON.stringify(MapperFactory.getMapper('specieslimits').mapData(results)), { status: HTTPResponses.OK }); + } catch (error: any) { + throw new Error(error); + } finally { + if (conn) conn.release(); + } +} + +export async function PATCH(request: NextRequest, { params }: { params: { speciesID: string } }) { + const schema = request.nextUrl.searchParams.get('schema'); + if (!schema) throw new Error('Schema not provided'); + if (params.speciesID === 'undefined') throw new Error('SpeciesID not provided'); + const { newRow } = await request.json(); + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + await conn.beginTransaction(); + const newRowData = MapperFactory.getMapper('specieslimits').demapData([newRow])[0]; + const { ['SpeciesLimitID']: gridIDKey, ...remainingProperties } = newRowData; + const query = `UPDATE ${schema}.specieslimits SET ? WHERE ?? 
= ?`; + const results = await runQuery(conn, query, [remainingProperties, 'SpeciesLimitID', gridIDKey]); + } catch (e: any) { + await conn?.rollback(); + throw new Error(e); + } finally { + if (conn) conn.release(); + } +} diff --git a/frontend/app/api/sqlload/route.ts b/frontend/app/api/sqlload/route.ts index 7d04cf06..3e82e070 100644 --- a/frontend/app/api/sqlload/route.ts +++ b/frontend/app/api/sqlload/route.ts @@ -1,9 +1,9 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, InsertUpdateProcessingProps} from "@/components/processors/processormacros"; -import {PoolConnection} from "mysql2/promise"; -import {HTTPResponses} from "@/config/macros"; -import {FileRow, FileRowSet} from "@/config/macros/formdetails"; -import {insertOrUpdate} from "@/components/processors/processorhelperfunctions"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, InsertUpdateProcessingProps } from '@/components/processors/processormacros'; +import { PoolConnection } from 'mysql2/promise'; +import { HTTPResponses } from '@/config/macros'; +import { FileRow, FileRowSet } from '@/config/macros/formdetails'; +import { insertOrUpdate } from '@/components/processors/processorhelperfunctions'; export async function POST(request: NextRequest) { const fileRowSet: FileRowSet = await request.json(); @@ -18,35 +18,23 @@ export async function POST(request: NextRequest) { if (!fileName) throw new Error('no file name provided!'); fileName = fileName.trim(); // plot ID - const plotIDParam = request.nextUrl.searchParams.get("plot"); + const plotIDParam = request.nextUrl.searchParams.get('plot'); if (!plotIDParam) throw new Error('no plot id provided!'); const plotID = parseInt(plotIDParam.trim()); // census ID - const censusIDParam = request.nextUrl.searchParams.get("census"); + const censusIDParam = request.nextUrl.searchParams.get('census'); if (!censusIDParam) throw new Error('no census id provided!'); const censusID = parseInt(censusIDParam.trim()); // quadrat ID - const quadratIDParam = request.nextUrl.searchParams.get("quadrat"); - if (!quadratIDParam) console.error("no quadrat ID provided"); + const quadratIDParam = request.nextUrl.searchParams.get('quadrat'); + if (!quadratIDParam) console.error('no quadrat ID provided'); const quadratID = quadratIDParam ? parseInt(quadratIDParam.trim()) : undefined; // form type - let formType = request.nextUrl.searchParams.get("formType"); + let formType = request.nextUrl.searchParams.get('formType'); if (!formType) throw new Error('no formType provided!'); formType = formType.trim(); // full name - const fullName = request.nextUrl.searchParams.get("user") ?? undefined; - // if (!fullName) throw new Error('no full name provided!'); - // fullName = fullName.trim(); - // unit of measurement --> use has been incorporated into form - // let dbhUnit = request.nextUrl.searchParams.get('dbhUnit'); - // if (!dbhUnit) throw new Error('no DBH unitOfMeasurement provided!'); - // dbhUnit = dbhUnit.trim(); - // let homUnit = request.nextUrl.searchParams.get('homUnit'); - // if (!homUnit) throw new Error('no HOM unitOfMeasurement provided!'); - // dbhUnit = dbhUnit.trim(); - // let coordUnit = request.nextUrl.searchParams.get('coordUnit'); - // if (!coordUnit) throw new Error('no Coordinate unitOfMeasurement provided!'); - // dbhUnit = dbhUnit.trim(); + const fullName = request.nextUrl.searchParams.get('user') ?? 
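A quick note on the specieslimits PATCH above: it opens a transaction and runs the UPDATE, but as written it falls through to the finally block without committing or returning a success response. The query itself leans on mysql2 placeholder expansion, sketched below with illustrative schema, table, and column values (the real column names come from the specieslimits table definition).

```ts
import { format } from 'mysql2/promise';

// `??` escapes identifiers and `?` expands a plain object into `col = value, ...` pairs,
// which is what lets `SET ?` take the demapped row with the ID key removed.
const sql = format('UPDATE ?? SET ? WHERE ?? = ?', [
  'myschema.specieslimits',               // hypothetical schema.table
  { UpperBound: 250, LimitType: 'DBH' },  // illustrative demapped properties
  'SpeciesLimitID',
  42
]);
console.log(sql);
// UPDATE `myschema`.`specieslimits` SET `UpperBound` = 250, `LimitType` = 'DBH' WHERE `SpeciesLimitID` = 42
```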
undefined; let connection: PoolConnection | null = null; // Use PoolConnection type @@ -55,19 +43,19 @@ export async function POST(request: NextRequest) { connection = await getConn(); } catch (error) { if (error instanceof Error) { - console.error("Error processing files:", error.message); + console.error('Error processing files:', error.message); return new NextResponse( JSON.stringify({ responseMessage: `Failure in connecting to SQL with ${error.message}`, - error: error.message, + error: error.message }), { status: HTTPResponses.SQL_CONNECTION_FAILURE } ); } else { - console.error("Unknown error in connecting to SQL:", error); + console.error('Unknown error in connecting to SQL:', error); return new NextResponse( JSON.stringify({ - responseMessage: `Unknown SQL connection error with error: ${error}`, + responseMessage: `Unknown SQL connection error with error: ${error}` }), { status: HTTPResponses.SQL_CONNECTION_FAILURE } ); @@ -75,10 +63,10 @@ export async function POST(request: NextRequest) { } if (!connection) { - console.error("Container client or SQL connection is undefined."); + console.error('Container client or SQL connection is undefined.'); return new NextResponse( JSON.stringify({ - responseMessage: "Container client or SQL connection is undefined", + responseMessage: 'Container client or SQL connection is undefined' }), { status: HTTPResponses.SERVICE_UNAVAILABLE } ); @@ -98,13 +86,13 @@ export async function POST(request: NextRequest) { plotID, censusID, quadratID, - fullName, + fullName }; const coreMeasurementID = await insertOrUpdate(props); if (formType === 'measurements' && coreMeasurementID) { idToRows.push({ coreMeasurementID: coreMeasurementID, fileRow: row }); } else if (formType === 'measurements' && coreMeasurementID === undefined) { - throw new Error("CoreMeasurement insertion failure at row: " + row); + throw new Error('CoreMeasurement insertion failure at row: ' + row); } } catch (error) { if (error instanceof Error) { @@ -112,15 +100,15 @@ export async function POST(request: NextRequest) { return new NextResponse( JSON.stringify({ responseMessage: `Error processing row in file ${fileName}`, - error: error.message, + error: error.message }), { status: HTTPResponses.SERVICE_UNAVAILABLE } ); } else { - console.error("Unknown error processing row:", error); + console.error('Unknown error processing row:', error); return new NextResponse( JSON.stringify({ - responseMessage: `Unknown processing error at row, in file ${fileName}`, + responseMessage: `Unknown processing error at row, in file ${fileName}` }), { status: HTTPResponses.SERVICE_UNAVAILABLE } ); @@ -129,5 +117,5 @@ export async function POST(request: NextRequest) { if (connection) connection.release(); } } - return new NextResponse(JSON.stringify({ message: "Insert to SQL successful", idToRows: idToRows }), { status: 200 }); -} \ No newline at end of file + return new NextResponse(JSON.stringify({ message: 'Insert to SQL successful', idToRows: idToRows }), { status: HTTPResponses.OK }); +} diff --git a/frontend/app/api/sqlmonitor/route.ts b/frontend/app/api/sqlmonitor/route.ts index b4a5a405..01c308a7 100644 --- a/frontend/app/api/sqlmonitor/route.ts +++ b/frontend/app/api/sqlmonitor/route.ts @@ -1,13 +1,14 @@ -import {poolMonitor} from "@/components/processors/processormacros"; -import {NextResponse} from "next/server"; +import { poolMonitor } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import { NextResponse } from 'next/server'; export async 
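As a usage sketch for the sqlload POST handler above: the JSON body is the FileRowSet the route parses, and the query string carries plot, census, optional quadrat, formType, and optional user. The file-name parameter key and the local row types below are assumptions for illustration; the canonical shapes live in '@/config/macros/formdetails'.

```ts
// Illustrative row shapes; the real FileRow/FileRowSet come from '@/config/macros/formdetails'.
type FileRow = Record<string, string | null>;
type FileRowSet = Record<string, FileRow>;

async function loadRows(
  rows: FileRowSet,
  opts: { fileName: string; plot: number; census: number; quadrat?: number; formType: string; user?: string }
) {
  const params = new URLSearchParams({
    fileName: opts.fileName, // parameter name assumed; the route only shows the trimmed value
    plot: String(opts.plot),
    census: String(opts.census),
    formType: opts.formType
  });
  if (opts.quadrat !== undefined) params.set('quadrat', String(opts.quadrat));
  if (opts.user) params.set('user', opts.user);

  const response = await fetch(`/api/sqlload?${params.toString()}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(rows)
  });
  // On success the route returns { message, idToRows }; idToRows links each inserted
  // CoreMeasurementID back to its source row for 'measurements' uploads.
  return response.json();
}
```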
function GET() { try { const status = poolMonitor.getPoolStatus(); - return NextResponse.json({message: "Monitoring check successful ", status}, {status: 200}); + return NextResponse.json({ message: 'Monitoring check successful ', status }, { status: HTTPResponses.OK }); } catch (error: any) { // If there's an error in getting the pool status console.error('Error in pool monitoring:', error); - return NextResponse.json({message: 'Monitoring check failed', error: error.message}, {status: 500}); + return NextResponse.json({ message: 'Monitoring check failed', error: error.message }, { status: 500 }); } -} \ No newline at end of file +} diff --git a/frontend/app/api/structure/[schema]/route.ts b/frontend/app/api/structure/[schema]/route.ts new file mode 100644 index 00000000..87bd3b5d --- /dev/null +++ b/frontend/app/api/structure/[schema]/route.ts @@ -0,0 +1,21 @@ +import { NextRequest } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { PoolConnection } from 'mysql2/promise'; + +export async function GET(_request: NextRequest, { params }: { params: { schema: string } }) { + const schema = params.schema; + if (!schema) throw new Error('no schema variable provided!'); + const query = `SELECT table_name, column_name + FROM information_schema.columns + WHERE table_schema = ?`; + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + return new Response(JSON.stringify(await runQuery(conn, query, [schema])), { status: 200 }); + } catch (e: any) { + console.error('Error:', e); + throw new Error('Call failed: ', e); + } finally { + if (conn) conn.release(); + } +} diff --git a/frontend/app/api/validations/[validationType]/route.ts b/frontend/app/api/validations/[validationType]/route.ts deleted file mode 100644 index 232ebb26..00000000 --- a/frontend/app/api/validations/[validationType]/route.ts +++ /dev/null @@ -1,26 +0,0 @@ -import {NextRequest, NextResponse} from "next/server"; -import {runValidationProcedure} from "@/components/processors/processorhelperfunctions"; - -export async function GET(request: NextRequest, {params}: { params: { validationType: string } }) { - const schema = request.nextUrl.searchParams.get('schema'); - const plotIDParam = request.nextUrl.searchParams.get('plotID'); - const censusIDParam = request.nextUrl.searchParams.get('censusID'); - const minValueParam = request.nextUrl.searchParams.get('minValue'); - const maxValueParam = request.nextUrl.searchParams.get('maxValue'); - const validationType = params.validationType; - - if (!schema) throw new Error('No schema variable provided!'); - if (!validationType) throw new Error('validationType object not provided!'); - - const plotID = plotIDParam ? parseInt(plotIDParam) : null; - const censusID = censusIDParam ? parseInt(censusIDParam) : null; - const minValue = minValueParam !== 'undefined' && minValueParam !== null ? parseFloat(minValueParam) : null; - const maxValue = maxValueParam !== 'undefined' && maxValueParam !== null ? 
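The structure/[schema] GET above returns flat { table_name, column_name } rows from information_schema. A consumer will usually want them grouped per table; a sketch follows (endpoint path from the route, helper name illustrative, and tolerant of the uppercased identifiers some MySQL 8 versions return).

```ts
type ColumnRow = { table_name?: string; column_name?: string; TABLE_NAME?: string; COLUMN_NAME?: string };

async function fetchSchemaStructure(schema: string): Promise<Record<string, string[]>> {
  const response = await fetch(`/api/structure/${schema}`);
  const rows: ColumnRow[] = await response.json();
  return rows.reduce<Record<string, string[]>>((acc, row) => {
    // Accept either casing: information_schema identifiers may come back uppercased.
    const table = row.table_name ?? row.TABLE_NAME;
    const column = row.column_name ?? row.COLUMN_NAME;
    if (table && column) (acc[table] ??= []).push(column);
    return acc;
  }, {});
}

// Example result shape: { coremeasurements: ['CoreMeasurementID', 'CensusID', ...], quadrats: [...] }
```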
parseFloat(maxValueParam) : null; - - try { - const validationResponse = await runValidationProcedure(schema, validationType, plotID, censusID, minValue, maxValue); - return new NextResponse(JSON.stringify(validationResponse), {status: 200}); - } catch (error: any) { - return new NextResponse(JSON.stringify({error: error.message}), {status: 500}); - } -} diff --git a/frontend/app/api/validations/crud/route.ts b/frontend/app/api/validations/crud/route.ts new file mode 100644 index 00000000..508eceef --- /dev/null +++ b/frontend/app/api/validations/crud/route.ts @@ -0,0 +1,76 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { ValidationProceduresRDS } from '@/config/sqlrdsdefinitions/validations'; +import { format, PoolConnection } from 'mysql2/promise'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import MapperFactory from '@/config/datamapper'; + +export async function GET(_request: NextRequest) { + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + const query = `SELECT * FROM catalog.validationprocedures;`; + const results = await runQuery(conn, query); + return new NextResponse(JSON.stringify(MapperFactory.getMapper('validationprocedures').mapData(results)), { status: HTTPResponses.OK }); + } catch (error: any) { + console.error('Error:', error); + return NextResponse.json({}, { status: HTTPResponses.CONFLICT }); + } finally { + if (conn) conn.release(); + } +} + +export async function POST(request: NextRequest) { + const { validationProcedure }: { validationProcedure: ValidationProceduresRDS } = await request.json(); + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + delete validationProcedure['validationID']; + const insertQuery = format('INSERT INTO ?? SET ?', [`catalog.validationprocedures`, validationProcedure]); + const results = await runQuery(conn, insertQuery); + const insertID = results.insertId; + return NextResponse.json({ insertID }, { status: HTTPResponses.OK }); + } catch (error: any) { + console.error('Error:', error); + return NextResponse.json({}, { status: HTTPResponses.CONFLICT }); + } finally { + if (conn) conn.release(); + } +} + +export async function PATCH(request: NextRequest) { + const { validationProcedure }: { validationProcedure: ValidationProceduresRDS } = await request.json(); + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + const updatedValidationProcedure = delete validationProcedure['validationID']; + const updateQuery = format('UPDATE ?? SET ? WHERE ValidationID = ?', [ + `catalog.validationprocedures`, + updatedValidationProcedure, + validationProcedure.validationID + ]); + await runQuery(conn, updateQuery); + return NextResponse.json({}, { status: HTTPResponses.OK }); + } catch (error: any) { + console.error('Error:', error); + return NextResponse.json({}, { status: HTTPResponses.CONFLICT }); + } finally { + if (conn) conn.release(); + } +} + +export async function DELETE(request: NextRequest) { + const { validationProcedure }: { validationProcedure: ValidationProceduresRDS } = await request.json(); + let conn: PoolConnection | null = null; + try { + conn = await getConn(); + const deleteQuery = format('DELETE FROM ?? 
WHERE ValidationID = ?', [`catalog.validationprocedures`, validationProcedure.validationID]); + await runQuery(conn, deleteQuery); + return NextResponse.json({}, { status: HTTPResponses.OK }); + } catch (error: any) { + console.error('Error:', error); + return NextResponse.json({}, { status: HTTPResponses.CONFLICT }); + } finally { + if (conn) conn.release(); + } +} diff --git a/frontend/app/api/validations/procedures/[validationType]/route.ts b/frontend/app/api/validations/procedures/[validationType]/route.ts new file mode 100644 index 00000000..7606a619 --- /dev/null +++ b/frontend/app/api/validations/procedures/[validationType]/route.ts @@ -0,0 +1,30 @@ +import { NextRequest, NextResponse } from 'next/server'; +import { runValidation } from '@/components/processors/processorhelperfunctions'; +import { HTTPResponses } from '@/config/macros'; + +export async function POST(request: NextRequest, { params }: { params: { validationProcedureName: string } }) { + try { + const { schema, validationProcedureID, cursorQuery, p_CensusID, p_PlotID, minDBH, maxDBH, minHOM, maxHOM } = await request.json(); + console.log('data: ', schema, validationProcedureID, cursorQuery, p_CensusID, p_PlotID, minDBH, maxDBH, minHOM, maxHOM); + + // Execute the validation procedure using the provided inputs + const validationResponse = await runValidation(validationProcedureID, params.validationProcedureName, schema, cursorQuery, { + p_CensusID, + p_PlotID, + minDBH, + maxDBH, + minHOM, + maxHOM + }); + + return new NextResponse(JSON.stringify(validationResponse), { + status: HTTPResponses.OK, + headers: { 'Content-Type': 'application/json' } + }); + } catch (error: any) { + console.error('Error during validation:', error.message); + return new NextResponse(JSON.stringify({ error: error.message }), { + status: 500 + }); + } +} diff --git a/frontend/app/api/validations/updatepassedvalidations/route.ts b/frontend/app/api/validations/updatepassedvalidations/route.ts index e6d152a6..cc07c0a9 100644 --- a/frontend/app/api/validations/updatepassedvalidations/route.ts +++ b/frontend/app/api/validations/updatepassedvalidations/route.ts @@ -1,43 +1,24 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery, UpdateValidationResponse} from "@/components/processors/processormacros"; +import { NextRequest, NextResponse } from 'next/server'; +import { HTTPResponses } from '@/config/macros'; +import { updateValidatedRows } from '@/components/processors/processorhelperfunctions'; export async function GET(request: NextRequest) { - const conn = await getConn(); const schema = request.nextUrl.searchParams.get('schema'); if (!schema) throw new Error('no schema variable provided!'); const plotIDParam = request.nextUrl.searchParams.get('plotID'); const censusIDParam = request.nextUrl.searchParams.get('censusID'); + const plotID = plotIDParam ? parseInt(plotIDParam) : null; + const censusID = censusIDParam ? parseInt(censusIDParam) : null; try { - await conn.beginTransaction(); - - // Update query to toggle IsValidated status - const updateQuery = ` - UPDATE ${schema}.coremeasurements cm - LEFT JOIN ${schema}.cmverrors cme ON cm.CoreMeasurementID = cme.CoreMeasurementID - SET cm.IsValidated = TRUE - WHERE cm.IsValidated = FALSE - AND (cm.PlotID = ? OR ? IS NULL) - AND (cm.CensusID = ? OR ? 
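One detail worth flagging in the validations/crud PATCH above: `const updatedValidationProcedure = delete validationProcedure['validationID']` stores the boolean result of the `delete` operator and also removes the ID that the WHERE clause reads a moment later. A hedged sketch of the likely intent, separating the ID from the updatable fields before formatting the query:

```ts
import { format } from 'mysql2/promise';
import { ValidationProceduresRDS } from '@/config/sqlrdsdefinitions/validations';

// Sketch only: destructure the ID away instead of deleting it, so the remaining fields
// feed `SET ?` while the ID still feeds `WHERE ValidationID = ?`.
function buildValidationUpdate(validationProcedure: ValidationProceduresRDS): string {
  const { validationID, ...updatableFields } = validationProcedure;
  return format('UPDATE ?? SET ? WHERE ValidationID = ?', ['catalog.validationprocedures', updatableFields, validationID]);
}
```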
IS NULL) - AND cme.CoreMeasurementID IS NULL; - `; - - const updateResult = await runQuery(conn, updateQuery, [plotIDParam, plotIDParam, censusIDParam, censusIDParam]); - const rowsValidated = updateResult.affectedRows; - console.log(`Rows Updated: ${rowsValidated}`); - - await conn.commit(); - - const response: UpdateValidationResponse = { - rowsValidated: rowsValidated - }; - - return new NextResponse(JSON.stringify(response), {status: 200}); + const results = await updateValidatedRows(schema, { p_CensusID: censusID, p_PlotID: plotID }); + return new NextResponse(JSON.stringify(results), { + status: HTTPResponses.OK + }); } catch (error: any) { - await conn.rollback(); console.error('Error in update operation:', error.message); - return new NextResponse(JSON.stringify({error: error.message}), {status: 500}); - } finally { - if (conn) conn.release(); + return new NextResponse(JSON.stringify({ error: error.message }), { + status: 500 + }); } } diff --git a/frontend/app/api/validations/validationerrordisplay/route.ts b/frontend/app/api/validations/validationerrordisplay/route.ts index b68a0244..a68def84 100644 --- a/frontend/app/api/validations/validationerrordisplay/route.ts +++ b/frontend/app/api/validations/validationerrordisplay/route.ts @@ -1,8 +1,8 @@ -import {NextRequest, NextResponse} from "next/server"; -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PoolConnection} from "mysql2/promise"; -import {CMError} from "@/config/macros/uploadsystemmacros"; -import MapperFactory from "@/config/datamapper"; +import { NextRequest, NextResponse } from 'next/server'; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { PoolConnection } from 'mysql2/promise'; +import { CMError } from '@/config/macros/uploadsystemmacros'; +import { HTTPResponses } from '@/config/macros'; export async function GET(request: NextRequest) { let conn: PoolConnection | null = null; @@ -30,32 +30,25 @@ export async function GET(request: NextRequest) { const validationErrorsRows = await runQuery(conn, validationErrorsQuery); const parsedValidationErrors: CMError[] = validationErrorsRows.map((row: any) => ({ - CoreMeasurementID: row.CoreMeasurementID, - ValidationErrorIDs: row.ValidationErrorIDs.split(',').map(Number), - Descriptions: row.Descriptions.split(',') + coreMeasurementID: row.CoreMeasurementID, + validationErrorIDs: row.ValidationErrorIDs.split(',').map(Number), + descriptions: row.Descriptions.split(',') })); - - // Query to fetch coremeasurements pending validation (no errors) - const pendingValidationQuery = ` - SELECT cm.* - FROM - ${schema}.coremeasurements AS cm - LEFT JOIN - ${schema}.cmverrors AS cme ON cm.CoreMeasurementID = cme.CoreMeasurementID - WHERE - cm.IsValidated = b'0' AND cme.CMVErrorID IS NULL; - `; - const pendingValidationRows = await runQuery(conn, pendingValidationQuery); - const mapper = MapperFactory.getMapper('coremeasurements'); - const mappedPending = mapper.mapData(pendingValidationRows); - return new NextResponse(JSON.stringify({failed: parsedValidationErrors, pending: mappedPending}), { - status: 200, - headers: { - 'Content-Type': 'application/json' + return new NextResponse( + JSON.stringify({ + failed: parsedValidationErrors + }), + { + status: HTTPResponses.OK, + headers: { + 'Content-Type': 'application/json' + } } - }); + ); } catch (error: any) { - return new NextResponse(JSON.stringify({error: error.message}), {status: 500}); + return new NextResponse(JSON.stringify({ error: error.message }), { + 
status: 500 + }); } finally { if (conn) conn.release(); } diff --git a/frontend/app/api/validations/validationlist/route.ts b/frontend/app/api/validations/validationlist/route.ts index 456a6e6a..0e3b80bb 100644 --- a/frontend/app/api/validations/validationlist/route.ts +++ b/frontend/app/api/validations/validationlist/route.ts @@ -1,36 +1,57 @@ -import {getConn, runQuery} from "@/components/processors/processormacros"; -import {PoolConnection} from "mysql2/promise"; -import {NextResponse} from "next/server"; +import { getConn, runQuery } from '@/components/processors/processormacros'; +import { HTTPResponses } from '@/config/macros'; +import { PoolConnection } from 'mysql2/promise'; +import { NextRequest, NextResponse } from 'next/server'; type ValidationProcedure = { + ValidationID: number; ProcedureName: string; Description: string; + Definition: string; +}; + +type SiteSpecificValidations = { + ValidationProcedureID: number; + Name: string; + Description: string; + Definition: string; }; type ValidationMessages = { - [key: string]: string; + [key: string]: { id: number; description: string; definition: string }; }; -export async function GET(): Promise> { +export async function GET(request: NextRequest): Promise> { let conn: PoolConnection | null = null; + const schema = request.nextUrl.searchParams.get('schema'); + if (!schema) throw new Error('No schema variable provided!'); try { conn = await getConn(); - const query = `SELECT ProcedureName, Description FROM catalog.validationprocedures WHERE IsEnabled = 1;`; + const query = `SELECT ValidationID, ProcedureName, Description, Definition FROM catalog.validationprocedures WHERE IsEnabled IS TRUE;`; const results: ValidationProcedure[] = await runQuery(conn, query); - const validationMessages: ValidationMessages = results.reduce((acc, {ProcedureName, Description}) => { - acc[ProcedureName] = Description; + const customQuery = `SELECT ValidationProcedureID, Name, Description, Definition FROM ${schema}.sitespecificvalidations;`; + const customResults: SiteSpecificValidations[] = await runQuery(conn, customQuery); + + const validationMessages: ValidationMessages = results.reduce((acc, { ValidationID, ProcedureName, Description, Definition }) => { + acc[ProcedureName] = { id: ValidationID, description: Description, definition: Definition }; return acc; }, {} as ValidationMessages); - return new NextResponse(JSON.stringify(validationMessages), { - status: 200, - headers: {'Content-Type': 'application/json'} - }); + const siteValidationMessages: ValidationMessages = customResults.reduce((acc, { ValidationProcedureID, Name, Description, Definition }) => { + acc[Name] = { id: ValidationProcedureID, description: Description, definition: Definition }; + return acc; + }, {} as ValidationMessages); + return new NextResponse(JSON.stringify({ coreValidations: validationMessages, siteValidations: siteValidationMessages }), { + status: HTTPResponses.OK, + headers: { 'Content-Type': 'application/json' } + }); } catch (error: any) { console.error('Error in GET request:', error.message); - return new NextResponse(JSON.stringify({error: error.message}), {status: 500}); + return new NextResponse(JSON.stringify({ error: error.message }), { + status: 500 + }); } finally { if (conn) conn.release(); } diff --git a/frontend/app/contexts/datavalidityprovider.tsx b/frontend/app/contexts/datavalidityprovider.tsx index 8fd929c5..b3c7a08b 100644 --- a/frontend/app/contexts/datavalidityprovider.tsx +++ b/frontend/app/contexts/datavalidityprovider.tsx @@ -1,15 +1,16 @@ 
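For the reworked validationlist GET above, the response is now keyed as coreValidations and siteValidations, each mapping a procedure name to { id, description, definition }. A typed consumer sketch (the fetch wrapper and names are illustrative; the shapes mirror the route):

```ts
type ValidationEntry = { id: number; description: string; definition: string };
type ValidationMessages = Record<string, ValidationEntry>;
type ValidationListResponse = { coreValidations: ValidationMessages; siteValidations: ValidationMessages };

async function fetchValidationList(schema: string): Promise<ValidationListResponse> {
  const response = await fetch(`/api/validations/validationlist?schema=${encodeURIComponent(schema)}`);
  if (!response.ok) throw new Error(`validationlist request failed: ${response.status}`);
  return (await response.json()) as ValidationListResponse;
}

// Example: list every enabled core validation alongside its description.
// const { coreValidations } = await fetchValidationList('myschema');
// Object.entries(coreValidations).forEach(([name, { description }]) => console.log(name, description));
```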
-"use client"; -import React, {createContext, useCallback, useContext, useEffect, useState} from "react"; -import {useOrgCensusContext, usePlotContext, useSiteContext} from "./userselectionprovider"; -import {UnifiedValidityFlags} from "@/config/macros"; -import {useLoading} from "./loadingprovider"; +'use client'; +import React, { createContext, useCallback, useContext, useEffect, useState } from 'react'; +import { UnifiedValidityFlags } from '@/config/macros'; + +import { useOrgCensusContext, usePlotContext, useSiteContext } from './userselectionprovider'; +import { useLoading } from './loadingprovider'; const initialValidityState: UnifiedValidityFlags = { attributes: false, personnel: false, species: false, quadrats: false, - quadratpersonnel: false, + quadratpersonnel: false }; const DataValidityContext = createContext<{ @@ -19,12 +20,9 @@ const DataValidityContext = createContext<{ recheckValidityIfNeeded: () => Promise; }>({ validity: initialValidityState, - setValidity: () => { - }, - triggerRefresh: () => { - }, - recheckValidityIfNeeded: async () => { - }, + setValidity: () => {}, + triggerRefresh: () => {}, + recheckValidityIfNeeded: async () => {} }); const debounce = (func: (...args: any[]) => void, wait: number) => { @@ -35,42 +33,56 @@ const debounce = (func: (...args: any[]) => void, wait: number) => { }; }; -export const DataValidityProvider = ({children}: { children: React.ReactNode }) => { +export const DataValidityProvider = ({ children }: { children: React.ReactNode }) => { const [validity, setValidityState] = useState(initialValidityState); const [refreshNeeded, setRefreshNeeded] = useState(false); - const {setLoading} = useLoading(); + const { setLoading } = useLoading(); const currentSite = useSiteContext(); const currentPlot = usePlotContext(); const currentCensus = useOrgCensusContext(); const setValidity = useCallback((type: keyof UnifiedValidityFlags, value: boolean) => { - setValidityState(prev => ({...prev, [type]: value})); + setValidityState(prev => ({ ...prev, [type]: value })); }, []); - const checkDataValidity = useCallback(async (type?: keyof UnifiedValidityFlags) => { - if (!currentSite || !currentPlot || !currentCensus) return; - setLoading(true, 'Pre-validation in progress...'); - const url = `/api/cmprevalidation/${type}/${currentSite.schemaName}/${currentPlot.id}/${currentCensus.plotCensusNumber}`; - let response; - try { - response = await fetch(url, {method: 'GET'}); - } catch (error) { - console.error(error); - response = {ok: false}; - } - setValidity(type as keyof UnifiedValidityFlags, response.ok); - setLoading(false); - }, [currentSite, currentPlot, currentCensus, setValidity, validity]); + const checkDataValidity = useCallback( + async (types: (keyof UnifiedValidityFlags)[]) => { + if (!currentSite || !currentPlot || !currentCensus) return; + + setLoading(true, 'Pre-validation in progress...'); + try { + const results = await Promise.all( + types.map(async type => { + const url = `/api/cmprevalidation/${type}/${currentSite.schemaName}/${currentPlot.plotID}/${currentCensus.plotCensusNumber}`; + try { + const response = await fetch(url, { method: 'GET' }); + return { type, isValid: response.ok }; + } catch (error) { + console.error(error); + return { type, isValid: false }; + } + }) + ); + + results.forEach(({ type, isValid }) => { + setValidity(type, isValid); + }); + } finally { + setLoading(false); + } + }, + [currentSite, currentPlot, currentCensus, setValidity] + ); const recheckValidityIfNeeded = useCallback(async () => { - if 
((Object.values(validity).some(flag => !flag)) || refreshNeeded) { + if (Object.values(validity).some(flag => !flag) || refreshNeeded) { const typesToRefresh = Object.entries(validity) .filter(([_, value]) => !value) .map(([key]) => key as keyof UnifiedValidityFlags); - await Promise.all(typesToRefresh.map(item => checkDataValidity(item))); + await checkDataValidity(typesToRefresh); setRefreshNeeded(false); // Reset the refresh flag after rechecking } else { console.log('No flags set for rechecking, or missing site/plot/census data'); @@ -91,20 +103,19 @@ export const DataValidityProvider = ({children}: { children: React.ReactNode }) } }, [currentSite, currentPlot, currentCensus]); - const triggerRefresh = useCallback((types?: (keyof UnifiedValidityFlags)[]) => { - if (types) { - types.forEach(type => setValidity(type, false)); - } else { - Object.keys(validity).forEach(key => setValidity(key as keyof UnifiedValidityFlags, false)); - } - setRefreshNeeded(true); // Trigger a refresh - }, [setValidity]); - - return ( - - {children} - + const triggerRefresh = useCallback( + (types?: (keyof UnifiedValidityFlags)[]) => { + if (types) { + types.forEach(type => setValidity(type, false)); + } else { + Object.keys(validity).forEach(key => setValidity(key as keyof UnifiedValidityFlags, false)); + } + setRefreshNeeded(true); // Trigger a refresh + }, + [setValidity] ); + + return {children}; }; export const useDataValidityContext = () => useContext(DataValidityContext); diff --git a/frontend/app/contexts/listselectionprovider.tsx b/frontend/app/contexts/listselectionprovider.tsx index 07a74721..720b4f47 100644 --- a/frontend/app/contexts/listselectionprovider.tsx +++ b/frontend/app/contexts/listselectionprovider.tsx @@ -1,17 +1,9 @@ // ListSelectionProvider.tsx -"use client"; -import React, {createContext, Dispatch, useContext, useReducer} from 'react'; -import { - createEnhancedDispatch, - EnhancedDispatch, - genericLoadReducer, - LoadAction -} from "@/config/macros/contextreducers"; -import {QuadratRDS} from "@/config/sqlrdsdefinitions/tables/quadratrds"; -import {PlotRDS} from "@/config/sqlrdsdefinitions/tables/plotrds"; -import {SubquadratRDS} from '@/config/sqlrdsdefinitions/tables/subquadratrds'; -import {SitesRDS} from '@/config/sqlrdsdefinitions/tables/sitesrds'; -import {OrgCensus} from '@/config/sqlrdsdefinitions/orgcensusrds'; +'use client'; +import React, { createContext, Dispatch, useContext, useReducer } from 'react'; +import { createEnhancedDispatch, EnhancedDispatch, genericLoadReducer, LoadAction } from '@/config/macros/contextreducers'; +import { PlotRDS, QuadratRDS, SitesRDS, SubquadratRDS } from '@/config/sqlrdsdefinitions/zones'; +import { OrgCensus } from '@/config/sqlrdsdefinitions/timekeeping'; // contexts export const PlotListContext = createContext([]); @@ -28,25 +20,18 @@ export const SubquadratListDispatchContext = createContext | undefined>(undefined); export const FirstLoadDispatchContext = createContext | undefined>(undefined); -export function ListSelectionProvider({children}: Readonly<{ children: React.ReactNode }>) { +export function ListSelectionProvider({ children }: Readonly<{ children: React.ReactNode }>) { const [plotList, plotListDispatch] = useReducer>>(genericLoadReducer, []); - const [orgCensusList, orgCensusListDispatch] = - useReducer>>(genericLoadReducer, []); + const [orgCensusList, orgCensusListDispatch] = useReducer>>(genericLoadReducer, []); - const [quadratList, quadratListDispatch] = - useReducer>>(genericLoadReducer, []); + const [quadratList, 
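To show how the reworked DataValidityProvider above is meant to be consumed, here is a hypothetical component that flags the quadrats data as stale after a save so the batched pre-validation re-runs for it. The component, prop, and handler names are illustrative; the context hook and flag names come from the provider.

```tsx
'use client';
import React from 'react';
import { Button } from '@mui/joy';
import { useDataValidityContext } from '@/app/contexts/datavalidityprovider';

export default function QuadratSaveButton({ onSave }: { onSave: () => Promise<void> }) {
  const { validity, triggerRefresh } = useDataValidityContext();

  const handleClick = async () => {
    await onSave();
    triggerRefresh(['quadrats']); // clears the flag and lets recheckValidityIfNeeded re-run the batched check
  };

  return (
    <Button onClick={handleClick} color={validity.quadrats ? 'success' : 'warning'}>
      Save quadrats
    </Button>
  );
}
```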
quadratListDispatch] = useReducer>>(genericLoadReducer, []); - const [subquadratList, subquadratListDispatch] = - useReducer>>(genericLoadReducer, []); + const [subquadratList, subquadratListDispatch] = useReducer>>(genericLoadReducer, []); - const [siteList, siteListDispatch] = - useReducer>>(genericLoadReducer, []); + const [siteList, siteListDispatch] = useReducer>>(genericLoadReducer, []); - const [firstLoad, firstLoadDispatch] = useReducer( - firstLoadReducer, - true - ); + const [firstLoad, firstLoadDispatch] = useReducer(firstLoadReducer, true); const enhancedPlotListDispatch = createEnhancedDispatch(plotListDispatch, 'plotList'); const enhancedOrgCensusListDispatch = createEnhancedDispatch(orgCensusListDispatch, 'censusList'); @@ -66,9 +51,7 @@ export function ListSelectionProvider({children}: Readonly<{ children: React.Rea - - {children} - + {children} diff --git a/frontend/app/contexts/loadingprovider.tsx b/frontend/app/contexts/loadingprovider.tsx index cb518a82..a15fe190 100644 --- a/frontend/app/contexts/loadingprovider.tsx +++ b/frontend/app/contexts/loadingprovider.tsx @@ -1,6 +1,6 @@ -"use client"; +'use client'; -import React, {createContext, useContext, useState} from "react"; +import React, { createContext, useContext, useState } from 'react'; const LoadingContext = createContext<{ isLoading: boolean; @@ -9,11 +9,10 @@ const LoadingContext = createContext<{ }>({ isLoading: false, loadingMessage: '', - setLoading: () => { - } + setLoading: () => {} }); -export function LoadingProvider({children}: Readonly<{ children: React.ReactNode }>) { +export function LoadingProvider({ children }: Readonly<{ children: React.ReactNode }>) { const [isLoading, setIsLoading] = useState(false); const [loadingMessage, setLoadingMessage] = useState(''); @@ -22,12 +21,7 @@ export function LoadingProvider({children}: Readonly<{ children: React.ReactNode setLoadingMessage(message); }; - return ( - - {children} - - ); + return {children}; } -// Custom hook to use the loading context -export const useLoading = () => useContext(LoadingContext); \ No newline at end of file +export const useLoading = () => useContext(LoadingContext); diff --git a/frontend/app/contexts/lockanimationcontext.tsx b/frontend/app/contexts/lockanimationcontext.tsx index f047e320..b0db5bf0 100644 --- a/frontend/app/contexts/lockanimationcontext.tsx +++ b/frontend/app/contexts/lockanimationcontext.tsx @@ -1,5 +1,5 @@ -"use client"; -import React, {createContext, useContext, useState, ReactNode} from 'react'; +'use client'; +import React, { createContext, ReactNode, useContext, useState } from 'react'; interface LockAnimationContextProps { isPulsing: boolean; @@ -8,21 +8,17 @@ interface LockAnimationContextProps { const LockAnimationContext = createContext(undefined); -export const LockAnimationProvider: React.FC<{ children: ReactNode }> = ({children}) => { +export const LockAnimationProvider: React.FC<{ children: ReactNode }> = ({ children }) => { const [isPulsing, setIsPulsing] = useState(false); const triggerPulse = () => { setIsPulsing(true); setTimeout(() => { setIsPulsing(false); - }, 3000); + }, 5000); }; - return ( - - {children} - - ); + return {children}; }; export const useLockAnimation = () => { diff --git a/frontend/app/contexts/userselectionprovider.tsx b/frontend/app/contexts/userselectionprovider.tsx index 587a61d2..2eeb8e14 100644 --- a/frontend/app/contexts/userselectionprovider.tsx +++ b/frontend/app/contexts/userselectionprovider.tsx @@ -1,24 +1,16 @@ // userselectionprovider.tsx -"use client"; -import 
React, {createContext, useContext, useReducer} from "react"; -import { - createEnhancedDispatch, - EnhancedDispatch, - genericLoadContextReducer, - LoadAction -} from "@/config/macros/contextreducers"; -import {Site} from "@/config/sqlrdsdefinitions/tables/sitesrds"; -import {Quadrat} from "@/config/sqlrdsdefinitions/tables/quadratrds"; -import {Plot} from "@/config/sqlrdsdefinitions/tables/plotrds"; +'use client'; +import React, { createContext, useContext, useReducer } from 'react'; +import { createEnhancedDispatch, EnhancedDispatch, genericLoadContextReducer, LoadAction } from '@/config/macros/contextreducers'; import { useOrgCensusListContext, usePlotListContext, useQuadratListContext, useSiteListContext, useSubquadratListContext -} from "@/app/contexts/listselectionprovider"; -import {OrgCensus} from "@/config/sqlrdsdefinitions/orgcensusrds"; -import {Subquadrat} from "@/config/sqlrdsdefinitions/tables/subquadratrds"; +} from '@/app/contexts/listselectionprovider'; +import { Plot, Quadrat, Site, Subquadrat } from '@/config/sqlrdsdefinitions/zones'; +import { OrgCensus } from '@/config/sqlrdsdefinitions/timekeeping'; export const PlotContext = createContext(undefined); export const OrgCensusContext = createContext(undefined); @@ -31,7 +23,7 @@ export const QuadratDispatchContext = createContext | export const SubquadratDispatchContext = createContext | undefined>(undefined); export const SiteDispatchContext = createContext | undefined>(undefined); -export default function UserSelectionProvider({children}: Readonly<{ children: React.ReactNode }>) { +export default function UserSelectionProvider({ children }: Readonly<{ children: React.ReactNode }>) { const plotListContext = usePlotListContext(); const orgCensusListContext = useOrgCensusListContext(); const quadratListContext = useQuadratListContext(); @@ -76,9 +68,7 @@ export default function UserSelectionProvider({children}: Readonly<{ children: R - - {children} - + {children} diff --git a/frontend/app/error.tsx b/frontend/app/error.tsx new file mode 100644 index 00000000..5891e402 --- /dev/null +++ b/frontend/app/error.tsx @@ -0,0 +1,24 @@ +'use client'; + +import React, { useEffect } from 'react'; +import { Box, Button, Typography } from '@mui/joy'; + +const ErrorPage = ({ error, reset }: { error: Error; reset: () => void }) => { + useEffect(() => { + const timer = setTimeout(() => { + reset(); + }, 5000); + return () => clearTimeout(timer); + }, [reset]); + + return ( + + Something went wrong + {error.message} + Retrying in 5 seconds... + + + ); +}; + +export default ErrorPage; diff --git a/frontend/app/global-error.tsx b/frontend/app/global-error.tsx new file mode 100644 index 00000000..377eaa8a --- /dev/null +++ b/frontend/app/global-error.tsx @@ -0,0 +1,13 @@ +'use client'; // Error boundaries must be Client Components + +export default function GlobalError({ error, reset }: { error: Error & { digest?: string }; reset: () => void }) { + return ( + // global-error must include html and body tags + + +

+        <h2>Something went wrong!</h2>
+        <button onClick={() => reset()}>Try again</button>
+ + + + ); +} diff --git a/frontend/app/icon.jpg b/frontend/app/icon.jpg new file mode 100644 index 00000000..758391c5 Binary files /dev/null and b/frontend/app/icon.jpg differ diff --git a/frontend/app/layout.tsx b/frontend/app/layout.tsx index de13158f..5498a1a9 100644 --- a/frontend/app/layout.tsx +++ b/frontend/app/layout.tsx @@ -1,38 +1,38 @@ -import "@/styles/globals.css"; -import {Providers} from "./providers"; -import React from "react"; -import {ListSelectionProvider} from "@/app/contexts/listselectionprovider"; -import {Box} from "@mui/joy"; -import UserSelectionProvider from "@/app/contexts/userselectionprovider"; -import {LoadingProvider} from "@/app/contexts/loadingprovider"; -import {GlobalLoadingIndicator} from "@/styles/globalloadingindicator"; -import {DataValidityProvider} from "@/app/contexts/datavalidityprovider"; -import {LockAnimationProvider} from "./contexts/lockanimationcontext"; +import '@/styles/globals.css'; +import React from 'react'; +import { ListSelectionProvider } from '@/app/contexts/listselectionprovider'; +import { Box } from '@mui/joy'; +import UserSelectionProvider from '@/app/contexts/userselectionprovider'; +import { LoadingProvider } from '@/app/contexts/loadingprovider'; +import { GlobalLoadingIndicator } from '@/styles/globalloadingindicator'; +import { DataValidityProvider } from '@/app/contexts/datavalidityprovider'; -export default function RootLayout({children,}: Readonly<{ children: React.ReactNode; }>) { +import { Providers } from './providers'; +import { LockAnimationProvider } from './contexts/lockanimationcontext'; + +export default function RootLayout({ children }: Readonly<{ children: React.ReactNode }>) { return ( - - - ForestGEO Data Entry - - - - - - - - - - - {children} - - - - - - - - + + + ForestGEO Census + + + + + + + + + + + {children} + + + + + + + ); } diff --git a/frontend/app/loginfailed/page.tsx b/frontend/app/loginfailed/page.tsx index 3d160ca8..6ea396eb 100644 --- a/frontend/app/loginfailed/page.tsx +++ b/frontend/app/loginfailed/page.tsx @@ -1,10 +1,10 @@ -import LoginFailed from "@/components/client/loginfailure"; -import {Suspense} from "react"; +import LoginFailed from '@/components/client/loginfailure'; +import { Suspense } from 'react'; export default function LoginFailedPage() { return ( - + ); -} \ No newline at end of file +} diff --git a/frontend/app/not-found.tsx b/frontend/app/not-found.tsx new file mode 100644 index 00000000..0e7f8cec --- /dev/null +++ b/frontend/app/not-found.tsx @@ -0,0 +1,34 @@ +'use client'; +import { Box, Link as MuiLink, Typography } from '@mui/joy'; +import Link from 'next/link'; + +export default function NotFound() { + return ( + + + Not Found + + + Could not find requested resource + + + + Return Home + + + + ); +} diff --git a/frontend/app/page.tsx b/frontend/app/page.tsx index 05878239..3d914f1b 100644 --- a/frontend/app/page.tsx +++ b/frontend/app/page.tsx @@ -1,3 +1,3 @@ export default function HomePage() { return <>; -} \ No newline at end of file +} diff --git a/frontend/app/programerror.tsx b/frontend/app/programerror.tsx deleted file mode 100644 index 880a1d80..00000000 --- a/frontend/app/programerror.tsx +++ /dev/null @@ -1,24 +0,0 @@ -'use client'; -import React, {useEffect} from 'react'; -import Divider from '@mui/joy/Divider'; -import {Button, Card, CardContent} from '@mui/joy'; - -export default function ProgramError({error, reset}: Readonly<{ error: Error; reset: () => void; }>) { - useEffect(() => { - console.error(error); - }, [error]); - - return ( - - -

-        Something went wrong!
-        <Divider />
-        Error was: {error.message}
-        <Divider />
-        Error cause: {error.cause as string}
- ); -} \ No newline at end of file diff --git a/frontend/app/providers.tsx b/frontend/app/providers.tsx index 21460888..66a034f5 100644 --- a/frontend/app/providers.tsx +++ b/frontend/app/providers.tsx @@ -1,21 +1,19 @@ -"use client"; -import * as React from "react"; -import {SessionProvider} from "next-auth/react"; -import ThemeRegistry from "@/components/themeregistry/themeregistry"; -import {LocalizationProvider} from "@mui/x-date-pickers"; -import {AdapterMoment} from '@mui/x-date-pickers/AdapterMoment'; +'use client'; +import * as React from 'react'; +import { SessionProvider } from 'next-auth/react'; +import ThemeRegistry from '@/components/themeregistry/themeregistry'; +import { LocalizationProvider } from '@mui/x-date-pickers'; +import { AdapterMoment } from '@mui/x-date-pickers/AdapterMoment'; export interface ProvidersProps { children: React.ReactNode; } -export function Providers({children}: Readonly) { +export function Providers({ children }: Readonly) { return ( - - {children} - + {children} ); diff --git a/frontend/components/client/clientmacros.tsx b/frontend/components/client/clientmacros.tsx index a3dee877..b12662c5 100644 --- a/frontend/components/client/clientmacros.tsx +++ b/frontend/components/client/clientmacros.tsx @@ -1,27 +1,20 @@ -"use client"; +'use client'; -import {Box, Collapse, LinearProgress, LinearProgressProps, Slide, SlideProps, Typography} from "@mui/material"; -import React from "react"; +import { Box, Collapse, LinearProgress, LinearProgressProps, Slide, SlideProps, Typography } from '@mui/material'; +import React from 'react'; -export function LinearProgressWithLabel(props: LinearProgressProps & { value?: number, currentlyrunningmsg?: string }) { +export function LinearProgressWithLabel(props: LinearProgressProps & { value?: number; currentlyrunningmsg?: string }) { return ( - - - {props.value ? ( - - ) : ( - - )} + + + {props.value ? : } - + {props.value ? 
( - {`${Math.round( - props?.value, - )}% --> ${props?.currentlyrunningmsg}`} + {`${Math.round(props?.value)}% --> ${props?.currentlyrunningmsg}`} ) : ( {`${props?.currentlyrunningmsg}`} )} - ); @@ -32,12 +25,10 @@ interface SlideToggleProps { children: React.ReactNode; } -export function SlideToggle({isOpen, children}: SlideToggleProps) { +export function SlideToggle({ isOpen, children }: SlideToggleProps) { return ( - - {children} - + {children} ); } @@ -46,6 +37,4 @@ interface TransitionComponentProps extends Omit { children: React.ReactElement; } -export const TransitionComponent: React.FC = ({children, ...props}) => ( - {children} -); \ No newline at end of file +export const TransitionComponent: React.FC = ({ children, ...props }) => {children}; diff --git a/frontend/components/client/datagridcolumns.tsx b/frontend/components/client/datagridcolumns.tsx new file mode 100644 index 00000000..e5cc4cd3 --- /dev/null +++ b/frontend/components/client/datagridcolumns.tsx @@ -0,0 +1,1652 @@ +import { areaSelectionOptions, unitSelectionOptions } from '@/config/macros'; +import { Accordion, AccordionDetails, AccordionGroup, AccordionSummary, Box, FormHelperText, Input, Option, Select, Stack, Typography } from '@mui/joy'; +import { GridColDef, GridRenderEditCellParams, useGridApiRef } from '@mui/x-data-grid'; +import React, { useEffect, useState } from 'react'; +import Avatar from '@mui/joy/Avatar'; +import { ExpandMore } from '@mui/icons-material'; +import { useSession } from 'next-auth/react'; +import CodeMirror from '@uiw/react-codemirror'; +import { sql } from '@codemirror/lang-sql'; +import { AttributeStatusOptions } from '@/config/sqlrdsdefinitions/core'; + +export const formatHeader = (word1: string, word2: string) => ( + + + {word1} + + {word2} + +); + +export const quadratGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'quadratID', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'quadratName', + headerName: 'Quadrat Name', + headerClassName: 'header', + renderHeader: () => formatHeader('Quadrat', 'Name'), + flex: 0.75, + align: 'right', + headerAlign: 'right', + type: 'string', + editable: true + }, + { + field: 'startX', + headerName: 'X', + headerClassName: 'header', + flex: 0.5, + align: 'right', + headerAlign: 'right', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'startY', + headerName: 'Y', + headerClassName: 'header', + flex: 0.5, + align: 'right', + headerAlign: 'right', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'coordinateUnits', + headerName: 'Coordinate Units', + headerClassName: 'header', + flex: 1, + // renderHeader: () => formatHeader('Coordinate', 'Units'), + align: 'right', + headerAlign: 'right', + editable: true, + type: 'singleSelect', + valueOptions: unitSelectionOptions + }, + { + field: 'area', + headerName: 'Area', + headerClassName: 'header', + flex: 0.75, + align: 'right', + headerAlign: 'right', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'areaUnits', + headerName: 'Area Unit', + headerClassName: 'header', + flex: 1, + // renderHeader: () => formatHeader('Area', 'Unit'), + align: 
'right', + headerAlign: 'right', + editable: true, + type: 'singleSelect', + valueOptions: areaSelectionOptions + }, + { + field: 'dimensionX', + headerName: 'DimX', + headerClassName: 'header', + flex: 1, + renderHeader: () => formatHeader('Dimension', 'X'), + align: 'right', + type: 'number', + valueFormatter: (value: any) => { + let parsedValue = Number(value); + if (isNaN(value)) parsedValue = 0.0; + return parsedValue.toFixed(2); + }, + editable: true + }, + { + field: 'dimensionY', + headerName: 'DimY', + headerClassName: 'header', + flex: 1, + renderHeader: () => formatHeader('Dimension', 'Y'), + align: 'right', + headerAlign: 'right', + type: 'number', + valueFormatter: (value: any) => { + let parsedValue = Number(value); + if (isNaN(value)) parsedValue = 0.0; + return parsedValue.toFixed(2); + }, + editable: true + }, + { + field: 'dimensionUnits', + headerName: 'Dimension Unit', + headerClassName: 'header', + flex: 1, + renderHeader: () => formatHeader('Dimension', 'Unit'), + align: 'right', + headerAlign: 'right', + editable: true, + type: 'singleSelect', + valueOptions: unitSelectionOptions + }, + { + field: 'quadratShape', + headerName: 'Quadrat Shape', + headerClassName: 'header', + flex: 1, + renderHeader: () => formatHeader('Quadrat', 'Shape'), + align: 'right', + headerAlign: 'right', + type: 'string', + editable: true + } +]; + +export const AttributeGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'code', headerName: 'Code', headerClassName: 'header', minWidth: 150, flex: 1, editable: true }, // all unique ID columns need to be tagged 'id' + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + minWidth: 250, + flex: 1, + align: 'left', + editable: true + }, + { + field: 'status', + headerName: 'Status', + headerClassName: 'header', + minWidth: 150, + flex: 1, + align: 'left', + editable: true, + type: 'singleSelect', + valueOptions: AttributeStatusOptions + } +]; + +export const PersonnelGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'personnelID', + headerName: 'PersonnelID', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: false + }, + { + field: 'censusID', + headerName: 'Census ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'firstName', + headerName: 'First Name', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'lastName', + headerName: 'Last Name', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + } +]; + +export const StemTaxonomiesViewGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'stemID', headerName: '#', headerClassName: 'header', flex: 0.1, align: 'left' }, + { field: 'stemTag', headerName: 'Stem', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'treeID', headerName: 'Tree ID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'treeTag', headerName: 'Tree', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'speciesID', headerName: 'Species ID', headerClassName: 'header', flex: 1, align: 'left' }, + { + field: 
'speciesCode', + headerName: 'Species Code', + renderHeader: () => formatHeader('Species', 'Code'), + headerClassName: 'header', + flex: 1, + align: 'left' + }, + { field: 'familyID', headerName: 'Family ID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'family', headerName: 'Family', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'genusID', headerName: 'Genus ID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'genus', headerName: 'Genus', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'speciesName', headerName: 'Species', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'subspeciesName', headerName: 'Subspecies', headerClassName: 'header', flex: 1, align: 'left' }, + { + field: 'genusAuthority', + headerName: 'Genus Authority', + renderHeader: () => formatHeader('Genus', 'Authority'), + headerClassName: 'header', + flex: 1, + align: 'left' + }, + { + field: 'speciesAuthority', + headerName: 'Species Authority', + renderHeader: () => formatHeader('Species', 'Authority'), + headerClassName: 'header', + flex: 1, + align: 'left' + }, + { + field: 'subspeciesAuthority', + headerName: 'Subspecies Authority', + renderHeader: () => formatHeader('Subspecies', 'Authority'), + headerClassName: 'header', + flex: 1, + align: 'left' + }, + { + field: 'speciesIDLevel', + headerName: 'Species ID Level', + renderHeader: () => formatHeader('Species', 'ID Level'), + headerClassName: 'header', + flex: 1, + align: 'left' + }, + { + field: 'speciesFieldFamily', + headerName: 'Species Field Family', + renderHeader: () => formatHeader('Species', 'Field Family'), + headerClassName: 'header', + flex: 1, + align: 'left' + } +]; + +// note --> originally attempted to use GridValueFormatterParams, but this isn't exported by MUI X DataGrid anymore. replaced with for now. + +const renderDBHCell = (params: GridRenderEditCellParams) => { + const value = params.row.measuredDBH ? Number(params.row.measuredDBH).toFixed(2) : 'null'; + const units = params.row.dbhUnits || ''; + + return ( + + {value} + {units} + + ); +}; + +const renderEditDBHCell = (params: GridRenderEditCellParams) => { + const apiRef = useGridApiRef(); + const { id, row } = params; + const [error, setError] = useState(false); + const [value, setValue] = useState(row.measuredDBH); + + const handleValueChange = (event: React.ChangeEvent) => { + const inputValue = event.target.value; + const isValid = /^\d*\.?\d{0,2}$/.test(inputValue); + setError(!isValid); + if (isValid) { + setValue(inputValue); + } + }; + + const handleValueBlur = () => { + const truncatedValue = Number(value).toFixed(2); + apiRef.current.setEditCellValue({ id, field: 'measuredDBH', value: truncatedValue }); + }; + + const handleUnitsChange = (_event: React.SyntheticEvent | null, newValue: string | null) => { + if (newValue !== null) { + apiRef.current.setEditCellValue({ id, field: 'dbhUnits', value: newValue }); + } + }; + + useEffect(() => { + setValue(row.measuredDBH); + }, [row.measuredDBH]); + + return ( + + + + {error && ( + + Only numbers with up to 2 decimal places accepted! + + )} + + + + ); +}; + +const renderHOMCell = (params: GridRenderEditCellParams) => { + const value = params.row.measuredHOM ? 
Number(params.row.measuredHOM).toFixed(2) : 'null'; + const units = params.row.homUnits || ''; + + return ( + + {value} + {units} + + ); +}; + +const renderEditHOMCell = (params: GridRenderEditCellParams) => { + const apiRef = useGridApiRef(); + const { id, row } = params; + const [error, setError] = useState(false); + const [value, setValue] = useState(row.measuredHOM); + + const handleValueChange = (event: React.ChangeEvent) => { + const inputValue = event.target.value; + const isValid = /^\d*\.?\d{0,2}$/.test(inputValue); + setError(!isValid); + if (isValid) { + setValue(inputValue); + } + }; + + const handleValueBlur = () => { + const truncatedValue = Number(value).toFixed(2); + apiRef.current.setEditCellValue({ id, field: 'measuredHOM', value: truncatedValue }); + }; + + const handleUnitsChange = (_event: React.SyntheticEvent | null, newValue: string | null) => { + if (newValue !== null) { + apiRef.current.setEditCellValue({ id, field: 'homUnits', value: newValue }); + } + }; + + useEffect(() => { + setValue(row.measuredHOM); + }, [row.measuredHOM]); + + return ( + + + + {error && ( + + Only numbers with up to 2 decimal places accepted! + + )} + + + + ); +}; + +export const MeasurementsSummaryViewGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: 'ID', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'coreMeasurementID', + headerName: '#', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.4, + align: 'left' + }, + { + field: 'quadratName', + headerName: 'Quadrat', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.8, + align: 'left', + editable: true + }, + { + field: 'speciesID', + headerName: 'Species ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'speciesCode', + headerName: 'Species Code', + headerAlign: 'left', + headerClassName: 'header', + flex: 1.2, + align: 'left', + editable: true + }, + { + field: 'treeID', + headerName: 'Tree ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'treeTag', + headerName: 'Tree', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.7, + align: 'left', + editable: true + }, + { + field: 'stemID', + headerName: 'Stem ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'stemTag', + headerName: 'Stem', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.7, + align: 'left', + editable: true + }, + { + field: 'stemLocalX', + headerName: 'X', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.7, + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + maxWidth: 100, + align: 'left', + editable: true + }, + { + field: 'stemLocalY', + headerName: 'Y', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.7, + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + maxWidth: 100, + align: 'left', + editable: true + }, + { + field: 'stemUnits', + headerName: 'Stem Units', + headerClassName: 'header', + flex: 0.4, + maxWidth: 65, + renderHeader: () => formatHeader('Stem', 'Units'), + align: 'center', + editable: true, + type: 'singleSelect', + valueOptions: unitSelectionOptions + }, + { + field: 'measuredDBH', + headerName: 'DBH', + headerClassName: 'header', + flex: 0.8, + align: 'right', + editable: true, + // type: 'number', + // 
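The DBH and HOM edit cells above gate keyboard input with the regex /^\d*\.?\d{0,2}$/ and write back a two-decimal string on blur. A standalone illustration of that rule (helper and constant names are illustrative):

```ts
const TWO_DECIMAL_PATTERN = /^\d*\.?\d{0,2}$/;

const isValidMeasurementInput = (raw: string): boolean => TWO_DECIMAL_PATTERN.test(raw);

console.log(isValidMeasurementInput('12.34'));  // true
console.log(isValidMeasurementInput('12.345')); // false, triggers the helper-text error
console.log(isValidMeasurementInput(''));       // true, empty input stays allowed while typing
console.log(Number('12.3').toFixed(2));         // "12.30", what the blur handler passes to setEditCellValue
```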
valueFormatter: (value: any) => { + // return Number(value).toFixed(2); + // } + renderCell: renderDBHCell, + renderEditCell: renderEditDBHCell + // valueFormatter: (params: any) => { + // const value = params.row.measuredDBH ? Number(params.row.measuredDBH).toFixed(2) : 'null'; + // const units = params.row.dbhUnits || ''; + // return `${value} ${units}`; + // } + }, + // { + // field: 'dbhUnits', + // headerName: 'DBH Units', + // headerClassName: 'header', + // flex: 0.4, + // maxWidth: 65, + // renderHeader: () => formatHeader('DBH', 'Units'), + // align: 'center', + // editable: true, + // type: 'singleSelect', + // valueOptions: unitSelectionOptions + // }, + { + field: 'measuredHOM', + headerName: 'HOM', + headerClassName: 'header', + flex: 0.5, + align: 'right', + headerAlign: 'left', + editable: true, + // type: 'number', + // valueFormatter: (value: any) => { + // return Number(value).toFixed(2); + // } + renderCell: renderHOMCell, + renderEditCell: renderEditHOMCell + // valueFormatter: (params: any) => { + // const value = params.row.measuredDBH ? Number(params.row.measuredDBH).toFixed(2) : 'null'; + // const units = params.row.dbhUnits || ''; + // return `${value} ${units}`; + // } + }, + // { + // field: 'homUnits', + // headerName: 'HOM Units', + // headerClassName: 'header', + // flex: 0.4, + // maxWidth: 65, + // renderHeader: () => formatHeader('HOM', 'Units'), + // align: 'center', + // editable: true, + // type: 'singleSelect', + // valueOptions: unitSelectionOptions + // }, + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { field: 'attributes', headerName: 'Attributes', headerClassName: 'header', flex: 1, align: 'left', editable: true } +]; + +export const CensusGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'censusID', + headerName: 'ID', + type: 'number', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: false + }, + { + field: 'plotCensusNumber', + headerName: 'PlotCensusNumber', + type: 'number', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: false + }, + { + field: 'startDate', + headerName: 'Starting', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'date', + editable: true, + valueFormatter: (params: any) => { + if (params) { + return new Date(params).toDateString(); + } else return 'null'; + } + }, + { + field: 'endDate', + headerName: 'Ending', + headerClassName: 'header', + type: 'date', + flex: 1, + align: 'left', + editable: true, + valueFormatter: (params: any) => { + if (params) { + return new Date(params).toDateString(); + } else return 'null'; + } + }, + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + flex: 1, + type: 'string', + editable: true + } +]; + +export const ValidationErrorGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'validationErrorID', headerName: 'ValidationErrorID', headerClassName: 'header', flex: 1, align: 'left' }, + { + field: 'validationErrorDescription', + headerName: 'ValidationErrorDescription', + headerClassName: 'header', + flex: 1, + align: 'left' + } +]; + +export const CoreMeasurementsGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: 'ID', + 
headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'coreMeasurementID', + headerName: '#', + headerAlign: 'left', + headerClassName: 'header', + flex: 0.25, + align: 'left' + }, + { + field: 'censusID', + headerName: 'Census ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'stemID', + headerName: 'Stem ID', + headerAlign: 'left', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + }, + { + field: 'measuredDBH', + headerName: 'DBH', + headerClassName: 'header', + flex: 0.8, + align: 'right', + editable: true, + renderCell: renderDBHCell, + renderEditCell: renderEditDBHCell + }, + { + field: 'dbhUnits', + headerName: 'DBH Units', + headerClassName: 'header', + flex: 0.4, + maxWidth: 65, + renderHeader: () => formatHeader('DBH', 'Units'), + align: 'center', + editable: true, + type: 'singleSelect', + valueOptions: unitSelectionOptions + }, + { + field: 'measuredHOM', + headerName: 'HOM', + headerClassName: 'header', + flex: 0.5, + align: 'right', + headerAlign: 'left', + editable: true, + renderCell: renderHOMCell, + renderEditCell: renderEditHOMCell + }, + { + field: 'homUnits', + headerName: 'HOM Units', + headerClassName: 'header', + maxWidth: 65, + renderHeader: () => formatHeader('HOM', 'Units'), + align: 'center', + editable: true, + type: 'singleSelect', + valueOptions: unitSelectionOptions + } +]; + +export const SubquadratGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'ordering', headerName: 'Order', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { + field: 'subquadratName', + headerName: 'Name', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { field: 'quadratID', headerName: 'Quadrat', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { + field: 'dimensionX', + headerName: 'X-Dimension', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + editable: true + }, + { + field: 'dimensionY', + headerName: 'Y-Dimension', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'qX', + headerName: 'X', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'qY', + headerName: 'Y', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'unit', + headerName: 'Units', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'singleSelect', + valueOptions: unitSelectionOptions, + editable: true + } +]; + +export const StemGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'stemTag', + headerName: 'Stem Tag', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'localX', + headerName: 'Plot X', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) 
=> { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'localY', + headerName: 'Plot Y', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'coordinateUnits', + headerName: 'Unit', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'singleSelect', + valueOptions: unitSelectionOptions, + editable: true + }, + { + field: 'moved', + headerName: 'Moved', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'boolean', + editable: true + }, + { + field: 'stemDescription', + headerName: 'StemDescription', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + } +]; + +export const SpeciesInventoryGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'speciesInventoryID', headerName: 'SpeciesInventoryID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'censusID', headerName: 'CensusID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'plotID', headerName: 'PlotID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'speciesID', headerName: 'SpeciesID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'subSpeciesID', headerName: 'SubSpeciesID', headerClassName: 'header', flex: 1, align: 'left' } +]; + +export const SpeciesGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'speciesCode', + headerName: 'SpCode', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true, + maxWidth: 125 + }, + { + field: 'speciesName', + headerName: 'Species', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'subspeciesName', + headerName: 'Subspecies', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'idLevel', + headerName: 'IDLevel', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'speciesAuthority', + headerName: 'SpeciesAuth', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'subspeciesAuthority', + headerName: 'SubspeciesAuth', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'fieldFamily', + headerName: 'FieldFamily', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'validCode', + headerName: 'Valid Code', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + } +]; + +export const SpeciesLimitsGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'speciesLimitID', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'left', + headerAlign: 'left', + editable: false + }, + { + field: 'speciesID', + headerName: 'SpeciesID', + 
headerClassName: 'header', + flex: 0.3, + align: 'left', + headerAlign: 'left', + editable: false + }, + { + field: 'limitType', + headerName: 'LimitType', + renderHeader: () => formatHeader('Limit', 'Type'), + flex: 0.5, + align: 'left', + headerAlign: 'left', + type: 'singleSelect', + valueOptions: ['DBH', 'HOM'], + editable: true + }, + { + field: 'lowerBound', + headerName: 'LowerBound', + renderHeader: () => formatHeader('Lower', 'Limit'), + flex: 0.5, + align: 'left', + headerAlign: 'left', + type: 'number', + editable: true + }, + { + field: 'upperBound', + headerName: 'UpperBound', + renderHeader: () => formatHeader('Upper', 'Limit'), + flex: 0.5, + align: 'left', + headerAlign: 'left', + type: 'number', + editable: true + }, + { + field: 'unit', + headerName: 'Units', + headerClassName: 'header', + flex: 0.3, + align: 'left', + type: 'singleSelect', + valueOptions: unitSelectionOptions + } +]; + +export const RolesGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { + field: 'roleID', + headerName: '#', + headerClassName: 'header', + flex: 0.2, + align: 'right', + headerAlign: 'right', + editable: false + }, + // { field: 'roleID', headerName: 'RoleID', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { field: 'roleName', headerName: 'Role', headerClassName: 'header', flex: 1, align: 'left', editable: true }, + { + field: 'roleDescription', + headerName: 'Description', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + } +]; + +export const ReferenceGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'referenceID', headerName: 'ReferenceID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'publicationTitle', headerName: 'PublicationTitle', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'fullReference', headerName: 'FullReference', headerClassName: 'header', flex: 1, align: 'left' }, + { + field: 'dateOfPublication', + headerName: 'DateOfPublication', + type: 'date', + headerClassName: 'header', + flex: 1, + align: 'left', + valueGetter: (params: any) => { + if (!params.value) return null; + return new Date(params.value); + } + } +]; + +export const PlotGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'plotID', headerName: 'PlotID', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { field: 'plotName', headerName: 'PlotName', headerClassName: 'header', flex: 1, align: 'left', editable: true }, + { + field: 'locationName', + headerName: 'LocationName', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'countryName', + headerName: 'CountryName', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'dimensionX', + headerName: 'DimX', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'dimensionY', + headerName: 'DimY', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return 
Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'area', + headerName: 'Area', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'globalX', + headerName: 'GlobalX', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'globalY', + headerName: 'GlobalY', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'globalZ', + headerName: 'GlobalZ', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'number', + valueFormatter: (value: any) => { + return Number(value).toFixed(2); + }, + editable: true + }, + { + field: 'unit', + headerName: 'Units', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'singleSelect', + valueOptions: unitSelectionOptions + }, + { + field: 'plotShape', + headerName: 'PlotShape', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + }, + { + field: 'plotDescription', + headerName: 'PlotDescription', + headerClassName: 'header', + flex: 1, + align: 'left', + type: 'string', + editable: true + } +]; + +export const GenusGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'genusID', headerName: 'GenusID', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { field: 'familyID', headerName: 'FamilyID', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { field: 'genus', headerName: 'GenusName', headerClassName: 'header', flex: 1, align: 'left', editable: true }, + { + field: 'referenceID', + headerName: 'ReferenceID', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: false + }, + { + field: 'genusAuthority', + headerName: 'Authority', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: true + } +]; + +export const FamilyGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'familyID', headerName: 'FamilyID', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { field: 'family', headerName: 'Family', headerClassName: 'header', flex: 1, align: 'left', editable: false }, + { + field: 'referenceID', + headerName: 'ReferenceID', + headerClassName: 'header', + flex: 1, + align: 'left', + editable: false + } +]; +export const CMVErrorGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { field: 'cmvErrorID', headerName: 'CMVErrorID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'coreMeasurementID', headerName: 'CoreMeasurementID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'validationErrorID', headerName: 'ValidationErrorID', headerClassName: 'header', flex: 1, align: 'left' } +]; + +export const CMAttributeGridColumns: GridColDef[] = [ + { + field: 'id', + headerName: '#', + headerClassName: 'header', + flex: 0.3, + align: 'right', + headerAlign: 'right', + editable: false + }, + { 
field: 'cmaID', headerName: 'CMAID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'coreMeasurementID', headerName: 'CoreMeasurementID', headerClassName: 'header', flex: 1, align: 'left' }, + { field: 'code', headerName: 'Code', headerClassName: 'header', flex: 1, align: 'left' } +]; + +// Combine the column definitions +const combineColumns = (primary: GridColDef[], secondary: GridColDef[]): GridColDef[] => { + const combined = [...primary]; + + secondary.forEach(secondaryColumn => { + const primaryColumnIndex = primary.findIndex(primaryColumn => primaryColumn.field === secondaryColumn.field); + if (primaryColumnIndex === -1) { + combined.push(secondaryColumn); + } else { + // Field exists in both sets: merge the two definitions, letting the secondary column's properties override the primary's + combined[primaryColumnIndex] = { ...combined[primaryColumnIndex], ...secondaryColumn }; + } + }); + + return combined; +}; + +const rawColumns: GridColDef[] = combineColumns(MeasurementsSummaryViewGridColumns, StemTaxonomiesViewGridColumns); + +export const ViewFullTableGridColumns = rawColumns.map(column => { + if (column.field === 'speciesCode') { + return { ...column, renderHeader: () => formatHeader('Species', 'Code') }; + } else if (column.field === 'genusAuthority') { + return { ...column, renderHeader: () => formatHeader('Genus', 'Authority') }; + } else if (column.field === 'speciesAuthority') { + return { ...column, renderHeader: () => formatHeader('Species', 'Authority') }; + } else if (column.field === 'subspeciesAuthority') { + return { ...column, renderHeader: () => formatHeader('Subspecies', 'Authority') }; + } else if (column.field === 'speciesIDLevel') { + return { ...column, renderHeader: () => formatHeader('Species', 'ID Level') }; + } else if (column.field === 'speciesFieldFamily') { + return { ...column, renderHeader: () => formatHeader('Species', 'Field Family') }; + } else if (column.field === 'stemUnits') { + return { ...column, renderHeader: () => formatHeader('Stem', 'Units') }; + } else if (column.field === 'dbhUnits') { + return { ...column, renderHeader: () => formatHeader('DBH', 'Units') }; + } else if (column.field === 'homUnits') { + return { ...column, renderHeader: () => formatHeader('HOM', 'Units') }; + } + return column; +}); + +export const ValidationProceduresGridColumns: GridColDef[] = [ + { field: 'id', headerName: 'ID', headerClassName: 'header' }, + { field: 'validationID', headerName: '#', headerClassName: 'header' }, + { + field: 'procedureName', + headerName: 'Procedure', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + const value = params.row.procedureName.replace(/(DBH|HOM)([A-Z])/g, '$1 $2').replace(/([a-z])([A-Z])/g, '$1 $2'); + return {value}; + } + }, + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + return {params.row.description}; + } + }, + { + field: 'definition', + headerName: 'SQL Implementation', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + const { data: session } = useSession(); + let isEditing = false; + if (typeof params.id === 'string') { + isEditing = params.rowModesModel[parseInt(params.id)]?.mode === 'edit'; + } + const isAdmin = session?.user?.userStatus === 'db admin' || session?.user?.userStatus === 'global'; + + if (isEditing && isAdmin) { +
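// Only admins ('db admin' or 'global') with the row in edit mode get the inline SQL editor; all other users fall through to the read-only definition view rendered below. +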
return ( + { + // Update the grid row with the new value from CodeMirror + params.api.updateRows([{ ...params.row, definition: value }]); + }} + /> + ); + } + + return ( + + + + + + + {params.row.description} + + + {params.row.definition} + + + + ); + } + }, + { + field: 'createdAt', + headerName: 'Created At', + renderHeader: () => formatHeader('Created', 'At'), + type: 'date', + headerClassName: 'header', + headerAlign: 'center', + valueGetter: (params: any) => { + if (!params || !params.value) return null; + return new Date(params.value); + }, + editable: true, + flex: 0.4 + }, + { + field: 'updatedAt', + headerName: 'Updated At', + renderHeader: () => formatHeader('Updated', 'At'), + type: 'date', + headerClassName: 'header', + headerAlign: 'center', + valueGetter: (params: any) => { + if (!params || !params.value) return null; + return new Date(params.value); + }, + editable: true, + flex: 0.4 + }, + { field: 'isEnabled', headerName: 'Active?', headerClassName: 'header', type: 'boolean', editable: true, flex: 0.2 } +]; + +export const SiteSpecificValidationsGridColumns: GridColDef[] = [ + { field: 'id', headerName: 'ID', headerClassName: 'header' }, + { field: 'validationProcedureID', headerName: '#', headerClassName: 'header' }, + { + field: 'name', + headerName: 'Procedure', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + // site-specific validation rows carry 'name' (this column's field), not 'procedureName' + const value = params.row.name.replace(/(DBH|HOM)([A-Z])/g, '$1 $2').replace(/([a-z])([A-Z])/g, '$1 $2'); + return {value}; + } + }, + { + field: 'description', + headerName: 'Description', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + return {params.row.description}; + } + }, + { + field: 'definition', + headerName: 'SQL Implementation', + headerClassName: 'header', + type: 'string', + editable: true, + flex: 1, + renderCell: (params: GridRenderEditCellParams) => { + return ( + + + + + + + {params.row.description} + + + {params.row.definition} + + + + ); + } + }, + { field: 'isEnabled', headerName: 'Active?', headerClassName: 'header', type: 'boolean', editable: true, flex: 0.2 } +]; diff --git a/frontend/components/client/entrymodal.tsx b/frontend/components/client/entrymodal.tsx index 409a70af..d7c91327 100644 --- a/frontend/components/client/entrymodal.tsx +++ b/frontend/components/client/entrymodal.tsx @@ -1,49 +1,57 @@ -"use client"; +'use client'; import React from 'react'; -import {useFirstLoadContext, useFirstLoadDispatch,} from '@/app/contexts/listselectionprovider'; +import { useFirstLoadContext, useFirstLoadDispatch } from '@/app/contexts/listselectionprovider'; -import {Button, DialogActions, DialogContent, DialogTitle, Modal, ModalDialog, Stack, Typography,} from '@mui/joy'; +import { Button, DialogActions, DialogContent, DialogTitle, Modal, ModalDialog, Stack, Typography } from '@mui/joy'; import WarningRoundedIcon from '@mui/icons-material/WarningRounded'; import Divider from '@mui/joy/Divider'; -import {redirect} from 'next/navigation'; +import { redirect } from 'next/navigation'; -import {useSession} from "next-auth/react"; +import { useSession } from 'next-auth/react'; export default function EntryModal() { - const {data: _session, status} = useSession(); + const { data: _session, status } = useSession(); const firstLoad = useFirstLoadContext(); const firstLoadDispatch = useFirstLoadDispatch(); return ( <> - {(firstLoad && status !== 'unauthenticated') ? 
, reason: string) => { - if (reason !== 'backdropClick' && reason !== 'escapeKeyDown') { - return firstLoadDispatch ? firstLoadDispatch({firstLoad: false}) : undefined; - } - }}> - - - - Welcome to the Application! - - - - - Select Core Measurements Hub to view existing core - measurement data for a given plot, census, and quadrat - Select CSV & ArcGIS File Upload Hub to upload core - measurements in either CSV format or in collected ArcGIS format - Select Measurement Properties Hub to view and edit - measurement properties used in data collection - - - - - - - : (status === 'authenticated') && redirect('/dashboard')} + {firstLoad && status !== 'unauthenticated' ? ( + , reason: string) => { + if (reason !== 'backdropClick' && reason !== 'escapeKeyDown') { + return firstLoadDispatch ? firstLoadDispatch({ firstLoad: false }) : undefined; + } + }} + > + + + + Welcome to the Application! + + + + + + Select Core Measurements Hub to view existing core measurement data for a given plot, census, and quadrat + + + Select CSV & ArcGIS File Upload Hub to upload core measurements in either CSV format or in collected ArcGIS format + + + Select Measurement Properties Hub to view and edit measurement properties used in data collection + + + + + + + + + ) : ( + status === 'authenticated' && redirect('/dashboard') + )} ); -} \ No newline at end of file +} diff --git a/frontend/components/client/finalizeselectionsbutton.tsx b/frontend/components/client/finalizeselectionsbutton.tsx index 7afa0fe9..e00e6bf4 100644 --- a/frontend/components/client/finalizeselectionsbutton.tsx +++ b/frontend/components/client/finalizeselectionsbutton.tsx @@ -1,17 +1,17 @@ -"use client"; -import {Button, Grow} from "@mui/material"; -import React from "react"; +'use client'; +import { Button, Grow } from '@mui/material'; +import React from 'react'; interface FinalizeSelectionsButtonProps { onFinish: () => void; // Callback when button is clicked show: boolean; // Condition to show the button } -const FinalizeSelectionsButton: React.FC = ({onFinish, show}) => { +const FinalizeSelectionsButton: React.FC = ({ onFinish, show }) => { if (!show) return null; return ( - +