diff --git a/.github/workflows/api-develop.yml b/.github/workflows/api-develop.yml new file mode 100644 index 0000000..43a68d7 --- /dev/null +++ b/.github/workflows/api-develop.yml @@ -0,0 +1,41 @@ +name: Build API Develop + +on: + push: + branches: [develop] + workflow_dispatch: +jobs: + + APIDockerImage: + name: API Image Develop + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + - name: Configure AWS credentials + if: ${{ always() }} + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.CWBICI_DEVELOP_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.CWBICI_DEVELOP_AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.CWBICI_DEVELOP_AWS_REGION }} + + - name: Login to Amazon ECR + if: ${{ success() }} + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Build, tag, and push image to Amazon ECR (water-api) + if: ${{ success() }} + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: pallid-sturgeon-api + run: | + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:latest . + docker push $ECR_REGISTRY/$ECR_REPOSITORY:latest + - name: Logout of Amazon ECR + if: ${{ always() }} + run: docker logout ${{ steps.login-ecr.outputs.registry }} \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cb42d1e --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +# local config files +envvars.list +.vscode/* + +*.exe +scan*.txt +scratch \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json deleted file mode 100644 index 47de1b6..0000000 --- a/.vscode/launch.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. 
- // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "Launch", - "type": "go", - "request": "launch", - "mode": "auto", - "program": "${workspaceFolder}/server.go", - "env": { - "LAMBDACONTEXT": "false", - "DB_USER": "", - "DB_PASS": "", - "DB_NAME": "", - "LIB_DIR": "/usr/local/app/oracle/instantclient_21_1", - "DB_HOST": "host.docker.internal", - "DB_PORT": "1521", - "IPPK": "${workspaceFolder}/props/local.pem" - }, - "args": [] - } - ] -} diff --git a/Dockerfile b/Dockerfile index f886676..8da2c3d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,18 +1,42 @@ -FROM golang:1.15-alpine AS builder -# Install Git -RUN apk update && apk add --no-cache git -# Copy In Source Code -WORKDIR /go/src/app -COPY . . - -# Install Dependencies -RUN go get -d -v -# Build -RUN go get -d -v \ - && GOOS=linux GOARCH=amd64 CGO_ENABLED=0 \ - go build -ldflags="-w -s" -o /go/bin/pallid_sturgeon_api - -# SCRATCH IMAGE -FROM scratch -COPY --from=builder /go/bin/pallid_sturgeon_api /go/bin/pallid_sturgeon_api +FROM golang:1.23.2-alpine AS builder +RUN apk add build-base +# Install Git +RUN apk update && apk add --no-cache git + +# Copy In Source Code +WORKDIR /go/src/app +COPY . . 
+ +# Install Dependencies +RUN go get -d -v +# Build +RUN go get -d -v \ + && GOOS=linux GOARCH=amd64 CGO_ENABLED=1 \ + go build -ldflags="-w -s" -o /go/bin/pallid_sturgeon_api + +# not SCRATCH IMAGE +# remove curl -o instantclient-basiclite.zip https://download.oracle.com/otn_software/linux/instantclient/instantclient-basiclite-linuxx64.zip -SL && \ +FROM alpine:latest +RUN apk add build-base +RUN apk add --no-cache bash +RUN apk --no-cache add libaio libnsl libc6-compat curl && \ + cd /tmp && \ + curl -o instantclient-basiclite.zip https://download.oracle.com/otn_software/linux/instantclient/2114000/instantclient-basic-linux.x64-21.14.0.0.0dbru.zip -SL && \ + unzip instantclient-basiclite.zip && \ + mv instantclient*/ /usr/lib/instantclient && \ + rm instantclient-basiclite.zip && \ + ln -s /usr/lib/instantclient/libclntsh.so.19.1 /usr/lib/libclntsh.so && \ + ln -s /usr/lib/instantclient/libocci.so.19.1 /usr/lib/libocci.so && \ + ln -s /usr/lib/instantclient/libociicus.so /usr/lib/libociicus.so && \ + ln -s /usr/lib/instantclient/libnnz19.so /usr/lib/libnnz19.so && \ + ln -s /usr/lib/libnsl.so.2 /usr/lib/libnsl.so.1 && \ + ln -s /lib/libc.so.6 /usr/lib/libresolv.so.2 && \ + ln -s /lib64/ld-linux-x86-64.so.2 /usr/lib/ld-linux-x86-64.so.2 + +ENV ORACLE_BASE /usr/lib/instantclient +ENV LD_LIBRARY_PATH /usr/lib/instantclient +ENV TNS_ADMIN /usr/lib/instantclient +ENV ORACLE_HOME /usr/lib/instantclient + +COPY --from=builder /go/bin/pallid_sturgeon_api /go/bin/pallid_sturgeon_api ENTRYPOINT ["/go/bin/pallid_sturgeon_api"] \ No newline at end of file diff --git a/README.md b/README.md index 0af3935..6e3ff5d 100644 --- a/README.md +++ b/README.md @@ -23,9 +23,11 @@ Set the following environment variables and type `go run root/main.go` from the "DB_PASS": "", "DB_NAME": "", "LIB_DIR": "", - "DB_HOST": "localhost", + "DB_HOST": "", "DB_PORT": "1521", - "IPPK": "${workspaceFolder}/props/local.pem", + "IPPK": "", Note: When running the API locally, make sure environment 
variable `LAMBDA` is either **not set** or is set to `LAMBDA=FALSE`. +IPPK is NOT in use currently in AWS Dev. +Use the SIG key when setting IPPK value. \ No newline at end of file diff --git a/auth/auth.go b/auth/auth.go index 6c49b37..944ffef 100644 --- a/auth/auth.go +++ b/auth/auth.go @@ -3,23 +3,22 @@ package auth import ( "crypto/rsa" "errors" - "fmt" - "io/ioutil" "log" "net/http" - "path/filepath" "strings" - "di2e.net/cwbi/pallid_sturgeon_api/server/models" - "di2e.net/cwbi/pallid_sturgeon_api/server/stores" - "github.com/dgrijalva/jwt-go" + "github.com/USACE/pallid_sturgeon_api/server/models" + "github.com/USACE/pallid_sturgeon_api/server/stores" + "github.com/golang-jwt/jwt/v4" "github.com/labstack/echo/v4" ) const ( PUBLIC = iota - PM - TM + ADMIN + OFFICEADMIN + OFFICEUSER + READONLY ) type Auth struct { @@ -27,11 +26,9 @@ type Auth struct { VerifyKey *rsa.PublicKey } -var verifyKeys []*rsa.PublicKey - /* Authorize Options: -1) Public - All CAC Users +1) Public - All KEYCLOAK Users 2) PM - Project Manager 3) TM - Team Member */ @@ -50,39 +47,37 @@ func (a *Auth) Authorize(handler echo.HandlerFunc, roles ...int) echo.HandlerFun if err != nil { return err } - c.Set("SDUSER", user) + role, err := a.Store.GetUserRoleOffice(user.Email) + if err != nil { + return err + } + c.Set("PSUSER", user) switch { - case contains(roles, TM): - for _, role := range claims.Roles { - if role == "PM" || role == "TM" { - return handler(c) - } + case contains(roles, PUBLIC): + return handler(c) + case contains(roles, ADMIN): + if role.Role == "ADMINISTRATOR" { + return handler(c) + } + case contains(roles, OFFICEADMIN): + if role.Role == "OFFICE ADMIN" { + return handler(c) } - case contains(roles, PM): - for _, role := range claims.Roles { - if role == "PM" { - return handler(c) - } + case contains(roles, OFFICEUSER): + if role.Role == "OFFICE USER" { + return handler(c) + } + case contains(roles, READONLY): + if role.Role == "READONLY" { + return handler(c) } } return 
echo.NewHTTPError(http.StatusUnauthorized, "") } } -func loadKeyFile(filePath string) (*rsa.PublicKey, error) { - publicKeyBytes, err := ioutil.ReadFile(filePath) - if err != nil { - return nil, err - } - return jwt.ParseRSAPublicKeyFromPEM(publicKeyBytes) -} - -func (a *Auth) LoadVerificationKey(filePath string) error { - publicKeyBytes, err := ioutil.ReadFile(filePath) - if err != nil { - return err - } - pk, err := jwt.ParseRSAPublicKeyFromPEM(publicKeyBytes) +func (a *Auth) LoadVerificationKey(publicKey string) error { + pk, err := jwt.ParseRSAPublicKeyFromPEM([]byte("-----BEGIN PUBLIC KEY-----\n" + publicKey + "\n-----END PUBLIC KEY-----")) if err != nil { return err } @@ -100,56 +95,11 @@ func (a *Auth) marshalJwt(tokenString string) (models.JwtClaim, error) { } if claims, ok := token.Claims.(jwt.MapClaims); ok && token.Valid { jwtUser := models.JwtClaim{ - Sub: claims["sub"].(string), - Name: claims["name"].(string), - Email: claims["email"].(string), - Roles: claims["roles"].([]interface{}), - } - return jwtUser, nil - } else { - return models.JwtClaim{}, errors.New("Invalid Token") - } -} - -func LoadVerificationKeys(fieldPath string) error { - files, err := ioutil.ReadDir(fieldPath) - if err != nil { - return err - } - for _, v := range files { - if ext := filepath.Ext(v.Name()); ext == ".pem" { - fmt.Printf("Loading Public Key: %s\n", v.Name()) - pk, err := loadKeyFile(fieldPath + "/" + v.Name()) - if err != nil { - return err - } - verifyKeys = append(verifyKeys, pk) - } - } - return nil -} - -func marshalJwts(tokenString string) (models.JwtClaim, error) { - var token *jwt.Token = nil - var err error - for _, verificationKey := range verifyKeys { - token, err = jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { - return verificationKey, nil - }) - if err == nil { - break - } - } - - if token == nil { - return models.JwtClaim{}, errors.New("Invalid Token") - } - if claims, ok := token.Claims.(jwt.MapClaims); ok && token.Valid { - 
jwtUser := models.JwtClaim{ - Sub: claims["sub"].(string), - Name: claims["name"].(string), - Email: claims["email"].(string), - Roles: claims["roles"].([]interface{}), + //CacUid: claims["cacUID"].(string), + Name: claims["name"].(string), + Email: claims["email"].(string), + FirstName: claims["given_name"].(string), + LastName: claims["family_name"].(string), } return jwtUser, nil } else { diff --git a/docker-compose.yml b/docker-compose.yml index ba3c26a..0f642a1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,69 +1,8 @@ -version: "3" - -services: - postgres: - image: mdillon/postgis - volumes: - - ./database/sql:/sql - - ./database/sql/init.sql:/docker-entrypoint-initdb.d/z_init.sql - environment: - - POSTGRES_PASSWORD=postgres - api: - build: - context: ./ - environment: - # NOTE: AWS_ Variables provided by execution role in deployed version - - AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE - - AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY - - AWS_DEFAULT_REGION=us-east-1 - - SD_APPLICATION_KEY=appkey - - SD_AUTH_DISABLED=False - - SD_AUTH_JWT_MOCKED=True - - SD_AWS_S3_REGION=us-east-1 - - SD_AWS_S3_BUCKET=corpsmap-data - - SD_AWS_S3_ENDPOINT=http://minio:9000 - - SD_AWS_S3_DISABLE_SSL=True - - SD_AWS_S3_FORCE_PATH_STYLE=True - - SD_LAMBDACONTEXT=false - - SD_DBUSER=sd_user - - SD_DBPASS=password - - SD_DBNAME=postgres - - SD_DBHOST=postgres - - SD_DBSSLMODE=disable - - SD_HEARTBEATKEY=password - ports: - - "80:80" - minio: - image: minio/minio - environment: - - MINIO_ACCESS_KEY=AKIAIOSFODNN7EXAMPLE - - MINIO_SECRET_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY - command: server /data - ports: - - "9000:9000" - # configure minio on startup (create buckets, etc) - # inspired by https://github.com/minio/minio/issues/4769 - # and https://gist.github.com/haxoza/22afe7cc4a9da7e8bdc09aad393a99cc - minio_init: - image: minio/mc - depends_on: - - minio - entrypoint: > - /bin/sh -c " - echo 'sleeping for 10 seconds while minio starts...'; - 
sleep 10; - /usr/bin/mc config host add minio http://minio:9000 AKIAIOSFODNN7EXAMPLE wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY; - /usr/bin/mc mb minio/corpsmap-data-incoming minio/corpsmap-data; - /usr/bin/mc policy set public minio/corpsmap-data; - /usr/bin/mc cp --recursive /media/projects/ minio/corpsmap-data/sd/projects/ ; - exit 0; - " - volumes: - - ./data:/media - pgadmin: - image: dpage/pgadmin4 - environment: - - PGADMIN_DEFAULT_EMAIL=postgres@postgres.com - - PGADMIN_DEFAULT_PASSWORD=postgres - ports: - - "8081:80" +version: "3" +services: + pallid-api: + image: pallid-sturgeon-api:latest + env_file: ./envvars.list + ports: + - "700:80" + - "701:8080" \ No newline at end of file diff --git a/docs/APIDOC.md b/docs/APIDOC.md index fcb009f..e21fd5d 100644 --- a/docs/APIDOC.md +++ b/docs/APIDOC.md @@ -2,8 +2,42 @@ Table of Contents - [Pallid Sturgeon API](#pallidsturgeon-api) + - [projects](#projects) + - [sampleMethods](#sampleMethods) + - [sampleUnitTypes](#sampleUnitTypes) + - [fieldOffices](#fieldOffices) - [seasons](#seasons) - - [uploads](#uploads) + - [segments](#segments) + - [bends](#bends) + - [siteDataEntry](#siteDataEntry) + - [fishDataEntry](#fishDataEntry) + - [moriverDataEntry](#moriverDataEntry) + - [supplementalDataEntry](#supplementalDataEntry) + - [upload](#upload) + - [fishDataSummary](#fishDataSummary) + - [suppDataSummary](#suppDataSummary) + - [missouriDataSummary](#missouriDataSummary) + - [geneticDataSummary](#geneticDataSummary) + - [searchDataSummary](#searchDataSummary) +--- +### Projects +- List Projects \ + [http://localhost:8080/psapi/projects](http://localhost:8080/psapi/projects) + +--- +### SampleMethods +- List SampleMethods \ + [http://localhost:8080/psapi/sampleMethods](http://localhost:8080/psapi/sampleMethods) + +--- +### SampleUnitTypes +- List SampleUnitTypes \ + [http://localhost:8080/psapi/sampleUnitTypes](http://localhost:8080/psapi/sampleUnitTypes) + +--- +### FieldOffices +- List FieldOffices \ + 
[http://localhost:8080/psapi/fieldOffices](http://localhost:8080/psapi/fieldOffices) --- ### Seasons @@ -11,96 +45,676 @@ Table of Contents [http://localhost:8080/psapi/seasons](http://localhost:8080/psapi/seasons) --- -### Uploads -- Site Upload \ - `http://localhost:8080/psapi/siteUpload` +### Segments +- List SeasSegmentsons \ + [http://localhost:8080/psapi/segments](http://localhost:8080/psapi/segments) + +--- +### Bends +- List Bends \ + [http://localhost:8080/psapi/bends](http://localhost:8080/psapi/bends) + +--- +### SiteDataEntry +- List siteDataEntry \ + [http://localhost:8080/psapi/siteDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5](http://localhost:8080/psapi/fishDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5) +- Create siteDataEntry \ + `http://localhost:8080/psapi/siteDataEntry` - Example `POST` body ``` - [ - { - "siteId": 0, - "siteFid": "F-1", - "siteYear": 2021, - "fieldofficeID": "SD", - "fieldOffice": "SD - South Dakota Game Fish & Parks", - "projectId": 1, - "project": "1 - Pallid Sturgeon Population Assessment", - "segmentId": 0, - "segment": "7 - Gavins Point Dam to Ponca", - "seasonId": "FC", - "season": "FC - Fish Community", - "bend": 10, - "bendrn": "R", - "bendRiverMile": 799.5, - "comments": "test", - "uploadSessionId": 78, - "uploadFilename": "test_datasheet.csv" - }, - { - "siteId": 1, - "siteFid": "F-1", - "siteYear": 2021, - "fieldofficeID": "SD", - "fieldOffice": "SD - South Dakota Game Fish & Parks", - "projectId": 1, - "project": "1 - Pallid Sturgeon Population Assessment", - "segmentId": 0, - "segment": "7 - Gavins Point Dam to Ponca", - "seasonId": "FC", - "season": "FC - Fish Community", - "bend": 10, - "bendrn": "R", - "bendRiverMile": 799.5, - "comments": "test", - "uploadSessionId": 78, - "uploadFilename": "test_datasheet.csv" - } - ] -- Fish Upload \ - `http://localhost:8080/psapi/fishUpload` + { + "siteFid": "F-1", + "siteYear": 2013, + "fieldOffice": "SD", + "project": 
"1", + "segment": "7", + "season": "ST", + "sampleUnitTypeCode": "B", + "bendrn": "R", + "editInitials": "DG", + "comments": "changed year" + } +- Update siteDataEntry \ + `http://localhost:8080/psapi/siteDataEntry` + - Example `PUT` body + ``` + { + "siteId": 10122, + "siteFid": "F-1", + "siteYear": 2013, + "fieldOffice": "SD", + "project": "1", + "segment": "7", + "season": "ST", + "sampleUnitTypeCode": "B", + "bendrn": "R", + "editInitials": "DG", + "comments": "changed year2" + } +--- +### FishDataEntry +- List fishDataEntry \ + [http://localhost:8080/psapi/fishDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5](http://localhost:8080/psapi/fishDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5) +- Create fishDataEntry \ + `http://localhost:8080/psapi/fishDataEntry` - Example `POST` body ``` - [ - { - "siteId": 0, - "fFid": "20150409-144555560-038-002", - "mrFid": "20150409-144555560-038", - "panelhook": "50", - "bait": "W", - "species": "SNSG", - "length": 545, - "weight": 578, - "fishcount": 1, - "finCurl": "X", - "otolith": "M", - "rayspine": "X", - "scale": "X", - "ftnum": "45678", - "ftmr": "L", - "ftprefix": "C", - "comments": "test", - "uploadSessionId": 1606, - "uploadFilename": "test_datasheet.csv" + { + "id": "", + "siteId": 0, + "fieldOffice": "", + "project": "", + "segment": "", + "uniqueID": "", + "panelhook": "", + "ffid": "20120827-031-01-01-01", + "mrId": 950, + "bait": "", + "species": "WTBS", + "length": 101, + "weight": "", + "fishcount": 1, + "finCurl": "", + "otolith": "", + "rayspine": "", + "scale": "", + "ftprefix": "", + "ftnum": "", + "ftmr": "", + "editInitials": "DG" + } +- Update fishDataEntry \ + `http://localhost:8080/psapi/fishDataEntry` + - Example `PUT` body + ``` + { + "fid": 3000031, + "id": "", + "siteId": 0, + "fieldOffice": "", + "project": "", + "segment": "", + "uniqueID": "", + "panelhook": "", + "ffid": "20120827-031-01-01-01", + "mrId": 950, + "bait": "", + "species": 
"WTBS", + "length": 101, + "weight": "", + "fishcount": 1, + "finCurl": "", + "otolith": "", + "rayspine": "", + "scale": "", + "ftprefix": "", + "ftnum": "", + "ftmr": "", + "editInitials": "DG" + } +--- +### MoriverDataEntry +- List moriverDataEntry \ + [http://localhost:8080/psapi/moriverDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5](http://localhost:8080/psapi/moriverDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5) +- Create moriverDataEntry \ + `http://localhost:8080/psapi/moriverDataEntry` + - Example `POST` body + ``` + { + "mrFid": "20121113-003-04-01-04", + "siteId": 493, + "fieldOffice": "", + "project": "", + "segment": "", + "season": "ST", + "setdate": "2012-11-13T00:00:00Z", + "subsample": 4, + "subsamplepass": 1, + "subsampleROrN": "R", + "recorder": "TWH", + "gear": "GN18", + "gearType": "S", + "temp": 5.8, + "turbidity": 23, + "conductivity": "", + "do": "", + "distance": "", + "width": "", + "netrivermile": "", + "structurenumber": "", + "usgs": "", + "riverstage": "", + "discharge": "", + "u1": "", + "u2": "", + "u3": "", + "u4": "", + "u5": "", + "u6": "", + "u7": "", + "macro": "ISB", + "meso": "POOL", + "habitatrn": "R", + "qc": "", + "microStructure": "4", + "structureFlow": "2", + "structureMod": "2", + "setSite_1": "1", + "setSite_2": "5", + "setSite_3": "0", + "startTime": "14:38:12", + "startLatitude": 40.97622, + "startLongitude": -95.82979, + "stopTime": "9:58:25", + "stopLatitude": "", + "stopLongitude": "", + "depth1": 4, + "velocitybot1": "", + "velocity08_1": "", + "velocity02or06_1": "", + "depth2": 4.3, + "velocitybot2": "", + "velocity08_2": "", + "velocity02or06_2": "", + "depth3": 4.2, + "velocitybot3": "", + "velocity08_3": "", + "velocity02or06_3": "", + "watervel": "", + "cobble": "", + "organic": "", + "silt": "", + "sand": "", + "gravel": "", + "comments": "no flow taken in eddy \r\n", + "complete": "", + "checkby": "", + "noTurbidity": "", + "noVelocity": "", + 
"editInitials": "DG" + } +- Update moriverDataEntry \ + `http://localhost:8080/psapi/moriverDataEntry` + - Example `PUT` body + ``` + { + "mrId": 300080, + "mrFid": "20121113-003-04-01-04", + "siteId": 493, + "fieldOffice": "", + "project": "", + "segment": "", + "season": "ST", + "setdate": "2012-11-13T00:00:00Z", + "subsample": 4, + "subsamplepass": 1, + "subsampleROrN": "R", + "recorder": "TWH", + "gear": "GN18", + "gearType": "S", + "temp": 5.8, + "turbidity": 23, + "conductivity": "", + "do": "", + "distance": "", + "width": "", + "netrivermile": "", + "structurenumber": "", + "usgs": "", + "riverstage": "", + "discharge": "", + "u1": "", + "u2": "", + "u3": "", + "u4": "", + "u5": "", + "u6": "", + "u7": "", + "macro": "ISB", + "meso": "POOL", + "habitatrn": "R", + "qc": "", + "microStructure": "4", + "structureFlow": "2", + "structureMod": "2", + "setSite_1": "1", + "setSite_2": "5", + "setSite_3": "0", + "startTime": "14:38:12", + "startLatitude": 40.97622, + "startLongitude": -95.82979, + "stopTime": "9:58:25", + "stopLatitude": "", + "stopLongitude": "", + "depth1": 4, + "velocitybot1": "", + "velocity08_1": "", + "velocity02or06_1": "", + "depth2": 4.3, + "velocitybot2": "", + "velocity08_2": "", + "velocity02or06_2": "", + "depth3": 4.2, + "velocitybot3": "", + "velocity08_3": "", + "velocity02or06_3": "", + "watervel": "", + "cobble": "", + "organic": "", + "silt": "", + "sand": "", + "gravel": "", + "comments": "no flow taken in eddy \r\n", + "complete": "", + "checkby": "", + "noTurbidity": "", + "noVelocity": "", + "editInitials": "DG" + } +--- +### SupplementalDataEntry +- List supplementalDataEntry \ + [http://localhost:8080/psapi/supplementalDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5](http://localhost:8080/psapi/supplementalDataEntry?fieldId=20120827-031-01-01-01&orderby=f_id%20desc&page=0&size=5) +- Create supplementalDataEntry \ + `http://localhost:8080/psapi/supplementalDataEntry` + - Example `POST` body + ``` + 
{ + "fid": 11781, + "fFid": "20121024-032-03-01-14-001", + "mrId": "2412", + "tagnumber": "4706162c09", + "pitrn": "N", + "scuteloc": "N", + "scutenum": "", + "scuteloc2": "", + "scutenum2": "", + "elhv": "", + "elcolor": "N", + "erhv": "", + "ercolor": "N", + "cwtyn": "N", + "dangler": "N", + "genetic": "Y", + "geneticsVialNumber": "3860", + "broodstock": "", + "hatchWild": "", + "speciesId": "", + "archive": "", + "head": "", + "snouttomouth": "", + "inter": "", + "mouthwidth": "", + "mIb": "", + "lOb": "", + "lIb": "", + "rIb": "", + "rOb": "", + "anal": "", + "dorsal": "", + "status": "", + "hatcheryOrigin": "", + "sex": "", + "stage": "", + "recapture": "", + "photo": "", + "geneticNeeds": "", + "otherTagInfo": "", + "comments": "-\r\n", + "editInitials": "DG" + } +- Update supplementalDataEntry \ + `http://localhost:8080/psapi/supplementalDataEntry` + - Example `PUT` body + ``` + { + "sid": 100041, + "fid": 11781, + "fFid": "20121024-032-03-01-14-001", + "mrId": "2412", + "tagnumber": "4706162c09", + "pitrn": "N", + "scuteloc": "N", + "scutenum": "", + "scuteloc2": "", + "scutenum2": "", + "elhv": "", + "elcolor": "N", + "erhv": "", + "ercolor": "N", + "cwtyn": "N", + "dangler": "N", + "genetic": "Y", + "geneticsVialNumber": "3860", + "broodstock": "", + "hatchWild": "", + "speciesId": "", + "archive": "", + "head": "", + "snouttomouth": "", + "inter": "", + "mouthwidth": "", + "mIb": "", + "lOb": "", + "lIb": "", + "rIb": "", + "rOb": "", + "anal": "", + "dorsal": "", + "status": "", + "hatcheryOrigin": "", + "sex": "", + "stage": "", + "recapture": "", + "photo": "", + "geneticNeeds": "", + "otherTagInfo": "", + "comments": "-\r\n", + "editInitials": "DG" + } +--- +### MoriverDataSummary +- List moriverDataSummary \ + 
[http://localhost:8080/psapi/moriverDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5](http://localhost:8080/psapi/moriverDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5) + +--- +### SuppDataSummary +- List suppDataSummary \ + [http://localhost:8080/psapi/suppDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5](http://localhost:8080/psapi/suppDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5) + +--- +### MissouriDataSummary +- List missouriDataSummary \ + [http://localhost:8080/psapi/missouriDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5](http://localhost:8080/psapi/missouriDataSummary?year=2021&officeCode=MO&project=1&season=MR&month=10&fromDate=10%2F06%2F2020&toDate=10%2F06%2F2020&orderby=mr_id%20desc&page=0&size=5) + +--- +### GeneticDataSummary +- List missouriDataSummary \ + [http://localhost:8080/psapi/geneticDataSummary?year=2020&officeCode=KC&project=2&fromDate=06%2F12%2F2020&toDate=06%2F12%2F2020&page=0&size=5](http://localhost:8080/psapi/geneticDataSummary?year=2020&officeCode=KC&project=2&fromDate=06%2F12%2F2020&toDate=06%2F12%2F2020&page=0&size=5) + +--- +### SearchDataSummary +- List missouriDataSummary \ + [http://localhost:8080/psapi/searchDataSummary?orderby=se_id%20desc&page=0&size=5](http://localhost:8080/psapi/searchDataSummary?orderby=se_id%20desc&page=0&size=5) + +--- +### Upload +- Upload \ + `http://localhost:8080/psapi/upload` + - Example `POST` body + ``` + { + "editInitials": "DG", + "siteUpload": { + "uploadFilename": "pspa_sites_datasheet_20210617_1900_59.csv", + 
"items" :[ + { + "siteId": 0, + "siteFid": "F-1", + "siteYear": 2021, + "fieldofficeID": "SD", + "fieldOffice": "KC - Kansas City", + "projectId": 1, + "project": "1 - Pallid Sturgeon Population Assessment", + "segmentId": 28, + "segment": "28 - Osage River", + "seasonId": "A0", + "season": "A0 - Age 0", + "bend": 2, + "bendrn": "N", + "bendRiverMile": 4.8, + "comments": "test" + } + ] + }, + "fishUpload": { + "uploadFilename": "fish_datasheet_20210617_1900_59.csv", + "items": [ + { + "siteId": 0, + "fFid": "20210617-184105056-001-001", + "mrFid": "20210617-184105056-001", + "panelhook": "1", + "bait": "W", + "species": "PDSG", + "length": 2, + "weight": 2, + "fishcount": 1, + "finCurl": "Y", + "otolith": "D", + "rayspine": "X", + "scale": "X", + "ftnum": "45678", + "ftmr": "L", + "ftprefix": "BC", + "comments": "test" + } + ] }, - { - "siteId": 2, - "fFid": "20150409-144555560-038-002", - "mrFid": "20150409-144555560-038", - "panelhook": "50", - "bait": "W", - "species": "SNSG", - "length": 545, - "weight": 578, - "fishcount": 1, - "finCurl": "X", - "otolith": "M", - "rayspine": "X", - "scale": "X", - "ftnum": "45678", - "ftmr": "L", - "ftprefix": "C", - "comments": "test", - "uploadSessionId": 1606, - "uploadFilename": "test_datasheet.csv" + "searchUpload":{ + "uploadFilename": "search_effort_20210617_1900_59.csv", + "items": [ + { + "siteId": 0, + "seFid": "20210617-185747028-001", + "dsId": 1, + "siteFid": "F-1", + "searchDate": "2021-06-17T00:00:00Z", + "recorder": "NR", + "searchTypeCode": "BS", + "searchDay": 12345678, + "startTime": "18:58:06", + "startLatitude": 50, + "startLongitude": -88, + "stopTime": "18:58:08", + "stopLatitud": 50, + "stopLongitude": -88, + "temp": 30, + "conductivity": 22 + } + ] + }, + "telemetryUpload": { + "uploadFilename": "telemetry_20210617_1900_59.csv", + "items": [ + { + "tFid": "20210617-185747028-001-001", + "seFid": "20210617-185747028-001", + "bend": 2, + "radioTagNum": 1234567890, + "frequencyIdCode": 3, + "captureTime": 
"18:59:49", + "captureLatitude": 50, + "captureLongitude": -88, + "positionConfidence": 2, + "macroId": "CONF", + "mesoId": "CHNB", + "depth": 1, + "temp": 30, + "conductivity": 1, + "turbidity": 1, + "silt": 1, + "sand": 1, + "gravel": 1, + "comments": "comments" + } + ] + }, + "procedureUpload": { + "uploadFilename": "procedure_20210617_1900_59.csv", + "items": [ + { + "f_fid": "20210617-184105056-001-001", + "purposeCode": "RI", + "procedurDate": "2021-06-17T00:00:00Z", + "procedureStartTime": "1:00", + "procedureEndTime": "2:00", + "procedureBy": "NR", + "antibioticInjectionInd": 0, + "photoDorsalInd": 0, + "photoVentralInd": 0, + "photoLeftInd": 0, + "oldRadioTagNum": 0, + "oldFrequencyId": 0, + "dstSerialNum": 12345, + "dstStartDate": "2021-06-16T00:00:00Z", + "dstStartTime": "10:30", + "dstReimplantInd": 0, + "newRadioTagNum": 1234, + "newFrequencyId": 0, + "sexCode": "s", + "bloodSampleInd": 0, + "eggSampleInd": 0, + "comments": "comments", + "fishHealthComments": "fishHealthComments", + "evalLocationCode": "GP", + "spawnCode": "PS", + "visualReproStatusCode": "R", + "ultrasoundReproStatusCode": "R", + "expectedSpawnYear": 1, + "ltrasoundGonadLength": 1, + "gonadCondition": "1" + } + ] + }, + "supplementalUpload": { + "uploadFilename": "supplemental_20210617_1900_59.csv", + "items": [ + { + "siteId": 0, + "fFid": "20210617-184105056-001-001", + "mrFid": "20210617-184105056-001", + "tagnumber": "1234567890", + "pitrn": "N", + "scuteloc": "D", + "scutenum": 2, + "scuteloc2": "l", + "scutenum2": 2, + "elhv": "H", + "elcolor": "G", + "erhv": "H", + "ercolor": "G", + "cwtyn": "Y", + "dangler": "Y", + "genetic": "Y", + "geneticsVialNumber": "STURG-1", + "broodstock": 1, + "hatchWild": 1, + "speciesId": 1, + "archive": 1, + "head": 1, + "snouttomouth": 1, + "inter": 1, + "outhwidth": 1, + "mIb": 1, + "lOb": 1, + "lIb": 1, + "rIb": 1, + "rOb": 1, + "anal": 1, + "orsal": 1, + "status": "H", + "hatcheryOrigin": "H", + "sex": "s", + "stage": "g", + "recapture": "c", + 
"photo": "p", + "geneticNeeds": "geneticNeeds", + "otherTagInfo": "otherTagInfo", + "comments": "comments" + } + ] + }, + "moriverUpload": { + "uploadFilename":"missouri_river_datasheet_20210617_1900_59.csv", + "items": [ + { + "siteId": 0, + "siteFid": "F-1", + "mrFid": "20210617-184105056-001", + "season": "A0", + "setdate": "2021-06-17T00:00:00Z", + "subsample": 1, + "subsamplepass": 1, + "subsamplen": "R", + "recorder": "NR", + "gear": "BSQD", + "gearType": "E", + "temp": 1, + "turbidity": 1, + "conductivity": 1, + "do": 1, + "distance": 1, + "width": 1, + "netrivermile": 2.5, + "structurenumber": "sn", + "usgs": "usgs", + "riverstage": 1, + "discharge": 1, + "u1": "u1", + "u2": "u2", + "u3": "u3", + "u4": "u4", + "u5": "u5", + "u6": "u6", + "u7": "u7", + "macro": "BRAD", + "meso": "CHNB", + "habitatrn": "N", + "qc": "q", + "microStructure": "m", + "structureFlow": "0", + "structureMod": "0", + "setSite_1": "0", + "setSite_2": "0", + "setSite_3": "3", + "startTime": "10:10:00 AM", + "startLatitude": 36, + "startLongitude": -88, + "stopTime": "6:57:10 PM", + "stopLatitude": 36, + "stopLongitude": -88, + "depth1": 1, + "velocitybot1": 1, + "velocity08_1": 1, + "velocity02or06_1": 1, + "depth2": 1, + "velocitybot2": 1, + "velocity08_2": 1, + "velocity02or06_2": 1, + "depth3": 1, + "velocitybot3": 1, + "velocity08_3": 1, + "velocity02or06_3": 1, + "watervel": 1, + "cobble": 0, + "organic": 1, + "silt": 30, + "sand": 30, + "gravel": 40, + "comments": "comments", + "complete": 1, + "checkby": "che", + "noTurbidity": "n", + "noVelocity": "n" + } + ] } - ] \ No newline at end of file + } + + - Example `POST` response + ``` + { + "uploadSessionId": 5308, + "uploadedBy": "DeeLiang", + "siteCntFinal": 1, + "mrCntFinal": 1, + "fishCntFinal": 1, + "searchCntFinal": 1, + "suppCntFinal": 1, + "telemetryCntFinal": 1, + "procedureCntFinal": 1, + "noSiteCnt": 0, + "siteMatch": 0, + "noSiteIDMsg": "" + } diff --git a/docs/apidoc.yaml b/docs/apidoc.yaml index 425f134..d32486a 
100644 --- a/docs/apidoc.yaml +++ b/docs/apidoc.yaml @@ -7,10 +7,82 @@ info: servers: - url: http://localhost:8080/psapi paths: + /projects: + get: + summary: List projects + operationId: GetProjects + tags: + - projects + responses: + '200': + description: Array of projects + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/project" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /fieldOfficeSegments: + get: + summary: List fieldOfficeSegments + operationId: GetFieldOfficeSegments + tags: + - fieldOfficeSegments + responses: + '200': + description: Array of fieldOfficeSegments + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/fieldOfficeSegment" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /fieldOffices: + get: + summary: List fieldOffices + operationId: GetFieldOffices + tags: + - fieldOffices + responses: + '200': + description: Array of fieldOffices + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/fieldOffice" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" /seasons: get: - summary: List Seasons - operationId: GetSeasons + summary: List seasons + operationId: GetsSeasons tags: - seasons responses: @@ -31,15 +103,256 @@ paths: application/json: schema: $ref: "#/components/schemas/Error" - /siteUpload: + /sampleMethods: + get: + summary: List sampleMethods + operationId: GetSampleMethods + tags: + - sampleMethods + responses: + '200': + description: Array of sampleMethods + headers: + 
x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/sampleMethod" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /sampleUnitTypes: + get: + summary: List sampleUnitTypes + operationId: GetSampleUnitTypes + tags: + - sampleUnitTypes + responses: + '200': + description: Array of sampleUnitTypes + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/sampleUnitType" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /segments: + get: + summary: List segments + operationId: GetSegments + tags: + - segments + responses: + '200': + description: Array of segments + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/segment" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /bends: + get: + summary: List bends + operationId: GetBends + tags: + - bends + responses: + '200': + description: Array of bends + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/bend" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /downloadInfo: + get: + summary: Download Info + operationId: GetDownloadInfo + tags: + - downloadInfo + responses: + '200': + description: downloadInfo + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/downloadInfo" + 
default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /upload: + /downloadZip: + get: + summary: Download Zip + operationId: downloadZip + tags: + - downloadZip + responses: + '200': + description: downloadZip + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/downloadZip" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /upload: + /siteDataEntry: + get: + summary: List Site Entries + operationId: GetSiteEntries + tags: + - siteEntries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: projectCode + in: query + required: false + description: The project code + schema: + type: string + - name: segmentCode + in: query + required: false + description: The segment code + schema: + type: string + - name: seasonCode + in: query + required: false + description: The season code + schema: + type: string + - name: bendrn + in: query + required: false + description: The bendrn + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of siteDataEntries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/uploadSite" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /upload: post: tags: - - "siteUpload" - summary: "Add a site upload" + - "saveSiteEntries" + summary: "Create 
Site Entries" + description: "" + operationId: "saveSiteEntries" + requestBody: + description: "object of site entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadSite" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + put: + tags: + - "updateSiteEntries" + summary: "Update Site Entries" description: "" - operationId: "addSiteUpload" + operationId: "updateSiteEntries" requestBody: - description: "Site upload object that needs to be uploaded" + description: "object of site entries" required: true content: application/json: @@ -48,21 +361,104 @@ paths: responses: '201': description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /fishDataEntry: + get: + summary: List Fish Entries + operationId: GetFishEntries + tags: + - fishEntries + parameters: + - name: tableId + in: query + required: false + description: The table id + schema: + type: string + - name: fieldId + in: query + required: false + description: The field id + schema: + type: string + - name: mrId + in: query + required: false + description: The mr id + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of fishDataEntries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/uploadFish" default: description: unexpected error content: application/json: schema: $ref: "#/components/schemas/Error" - /fishUpload: + /upload: post: tags: - 
- "fishUpload" - summary: "Add a fish upload" + - "saveFishEntries" + summary: "Create Fish Entries" + description: "" + operationId: "saveFishEntries" + requestBody: + description: "object of fish entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadFish" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + put: + tags: + - "updateFishEntries" + summary: "Update Fish Entries" description: "" - operationId: "addFishUpload" + operationId: "updateFishEntries" requestBody: - description: "Fish upload object that needs to be uploaded" + description: "object of fish entries" required: true content: application/json: @@ -71,54 +467,2023 @@ paths: responses: '201': description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /moriverDataEntry: + get: + summary: List Moriver Entries + operationId: GetMoriverEntries + tags: + - moriverEntries + parameters: + - name: tableId + in: query + required: false + description: The table id + schema: + type: string + - name: fieldId + in: query + required: false + description: The field id + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of moriverDataEntries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/uploadMoriver" default: description: unexpected error content: application/json: schema: $ref: "#/components/schemas/Error" -components: - schemas: - 
season: - type: object - required: - - id - - code - - description - - fieldAppFlag - - ProjectCode + post: + tags: + - "saveMoriverEntries" + summary: "Create Moriver Entries" + description: "" + operationId: "saveMoriverEntries" + requestBody: + description: "object of moriver entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadMoriver" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + put: + tags: + - "updateMoriverEntries" + summary: "Update Moriver Entries" + description: "" + operationId: "updateMoriverEntries" + requestBody: + description: "object of moriver entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadMoriver" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /supplementalDataEntry: + get: + summary: List Supplemental Entries + operationId: GetSupplementalEntries + tags: + - supplementalEntries + parameters: + - name: tableId + in: query + required: false + description: The table id + schema: + type: string + - name: fieldId + in: query + required: false + description: The field id + schema: + type: string + - name: geneticsVial + in: query + required: false + description: The field id + schema: + type: string + - name: pitTag + in: query + required: false + description: The field id + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of supplementalDataEntries + headers: + x-next: + description: A link to 
the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/uploadSupplemental" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + post: + tags: + - "saveSupplementalEntries" + summary: "Create Supplemental Entries" + description: "" + operationId: "saveSupplementalEntries" + requestBody: + description: "object of supplemental entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadSupplemental" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + put: + tags: + - "updateSupplementalEntries" + summary: "Update Supplemental Entries" + description: "" + operationId: "updateSupplementalEntries" + requestBody: + description: "object of supplemental entries" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/uploadSupplemental" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /errorCount: + get: + summary: List errorCounts + operationId: GetErrorCounts + tags: + - errorCounts + responses: + '200': + description: Array of errorCounts + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/errorCount" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /usgNoVialNumbers: + get: + summary: List usgNoVialNumbers + operationId: GetUsgNoVialNumbers + tags: + - usgNoVialNumbers + responses: + '200': + description: Array of usgNoVialNumbers + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + 
content: + application/json: + schema: + $ref: "#/components/schemas/usgNoVialNumber" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /unapprovedDataSheets: + get: + summary: List unapprovedDataSheets + operationId: GetUnapprovedDataSheets + tags: + - unapprovedDataSheets + responses: + '200': + description: Array of unapprovedDataSheets + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/unapprovedDataSheet" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /uncheckedDataSheets: + get: + summary: List uncheckedDataSheets + operationId: GetUncheckedDataSheets + tags: + - uncheckedDataSheets + parameters: + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of uncheckedDataSheets + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/uncheckedDataSheet" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /fishFullDataSummary: + get: + summary: List Full Fish Summaries + operationId: GetFullFishSummaries + tags: + - fullFishSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - 
name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + responses: + '200': + description: Array of fishSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /fishDataSummary: + get: + summary: List Fish Summaries + operationId: GetFishSummaries + tags: + - fishSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: orderby + in: query + required: false + 
description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of fishSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/fishSummary" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /suppFullDataSummary: + get: + summary: List Full Supp Summaries + operationId: GetFullSuppSummaries + tags: + - suppFullSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /suppDataSummary: + 
get: + summary: List Supp Summaries + operationId: GetSuppSummaries + tags: + - suppSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/suppSummary" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /missouriFullDataSummary: + get: + summary: List Full Missouri Summaries + operationId: GetMissouriFullSummaries + tags: + - missouriFullSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + 
schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /missouriDataSummary: + get: + summary: List Missouri Summaries + operationId: GetMissouriSummaries + tags: + - missouriSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - 
name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/missouriSummary" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /geneticFullDataSummary: + get: + summary: List Full Genetic Summaries + operationId: GetFullGeneticSummaries + tags: + - geneticFullSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: broodstock + in: query + required: false + description: The broodstock + schema: + type: string + - name: hatchwild + in: query + required: false + description: The hatchwild + schema: + type: string + - name: speciesId + in: query + required: false + description: The speciesId + schema: + type: string + - name: archive + in: query + required: false + description: The archive + schema: + type: string + responses: + '200': + description: Array of GeneticSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + 
application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /geneticDataSummary: + get: + summary: List Genetic Summaries + operationId: GetGeneticSummaries + tags: + - geneticSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: broodstock + in: query + required: false + description: The broodstock + schema: + type: string + - name: hatchwild + in: query + required: false + description: The hatchwild + schema: + type: string + - name: speciesId + in: query + required: false + description: The speciesId + schema: + type: string + - name: archive + in: query + required: false + description: The archive + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of GeneticSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/geneticSummary" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /searchFullDataSummary: + get: + summary: List Full Search Summaries + operationId: GetFullSearchSummaries + tags: + 
- searchFullSummaries + responses: + '200': + description: Array of SearchSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /searchDataSummary: + get: + summary: List Search Summaries + operationId: GetSearchSummaries + tags: + - searchSummaries + parameters: + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + - name: filter + in: query + required: false + description: The filter + schema: + type: string + responses: + '200': + description: Array of SearchSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + $ref: "#/components/schemas/searchSummary" + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /telemetryFullDataSummary: + get: + summary: List Full Telemetry Summaries + operationId: GetFullTelemetrySummaries + tags: + - telemetryFullSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + 
schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /telemetryDataSummary: + get: + summary: List Telemetry Summaries + operationId: GetTelemetrySummaries + tags: + - telemetrySummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: 
+ '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /procedureFullDataSummary: + get: + summary: List Full Procedure Summaries + operationId: GetFullProcedureSummaries + tags: + - procedureFullSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /procedureDataSummary: + get: + summary: List Procedure Summaries + operationId: GetProcedureSummaries + tags: + - procedureSummaries + parameters: + - name: year + in: query + required: false + description: The year + schema: + type: string + - name: officeCode + in: query + 
required: false + description: The office code + schema: + type: string + - name: project + in: query + required: false + description: The project + schema: + type: string + - name: approved + in: query + required: false + description: The approved + schema: + type: string + - name: season + in: query + required: false + description: The season + schema: + type: string + - name: spice + in: query + required: false + description: The spice + schema: + type: string + - name: month + in: query + required: false + description: The month + schema: + type: string + - name: fromDate + in: query + required: false + description: The fromDate + schema: + type: string + - name: toDate + in: query + required: false + description: The toDate + schema: + type: string + - name: orderby + in: query + required: false + description: The orderby + schema: + type: string + - name: page + in: query + required: false + description: The page + schema: + type: integer + - name: size + in: query + required: false + description: The size + schema: + type: integer + responses: + '200': + description: Array of SuppSummaries + headers: + x-next: + description: A link to the next page of responses + schema: + type: string + content: + application/json: + schema: + type: object + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" + /upload: + post: + tags: + - "upload" + summary: "upload all data" + description: "" + operationId: "addUpload" + requestBody: + description: "Collection of different types of uploads" + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/upload" + responses: + '201': + description: Null response + default: + description: unexpected error + content: + application/json: + schema: + $ref: "#/components/schemas/Error" +components: + schemas: + project: + type: object + required: + - code + - description + properties: + code: + type: integer + format: int32 + description: 
+ type: string + season: + type: object + required: + - id + - code + - description + - fieldAppFlag + - ProjectCode + properties: + id: + type: integer + format: int32 + code: + type: string + description: + type: string + fieldAppFlag: + type: string + ProjectCode: + type: integer + format: int32 + fieldOffice: + type: object + required: + - id + - code + - description + - state + properties: + id: + type: integer + format: int32 + code: + type: string + description: + type: string + state: + type: string + sampleMethod: + type: object + required: + - code + - description + properties: + code: + type: string + description: + type: string + sampleUnitType: + type: object + required: + - code + - description + properties: + code: + type: string + description: + type: string + segment: + type: object + required: + - id + - code + - type properties: id: type: integer format: int32 code: + type: integer + format: int32 + description: + type: string + type: type: string + upperRiverMile: + type: integer + format: int32 + lowerRiverMile: + type: integer + format: int32 + rpma: + type: integer + format: int32 + bend: + type: object + required: + - id + - code + - bendNumber + - state + properties: + id: + type: integer + format: int32 + bendNumber: + type: integer + format: int32 description: type: string - fieldAppFlag: + segmentCode: + type: integer + format: int32 + upperRiverMile: + type: integer + format: int32 + lowerRiverMile: + type: integer + format: int32 + state: + type: integer + format: int32 + fishSummary: + type: object + properties: + uniqueID: + type: integer + format: int32 + fishId: + type: integer + format: int32 + year: + type: integer + format: int32 + fieldOffice: type: string - ProjectCode: + project: + type: integer + format: int32 + segment: + type: integer + format: int32 + season: + type: string + bend: + type: integer + format: int32 + bendrn: + type: string + bendRiverMile: + type: number + format: float64 + panelhook: + type: string + 
species: + type: string + hatcheryOrigin: + type: string + checkedby: + type: string + suppSummary: + type: object + properties: + fishCode: + type: string + uniqueID: + type: integer + format: int32 + fishId: + type: integer + format: int32 + year: + type: integer + format: int32 + SuppID: + type: integer + format: int32 + fieldOffice: + type: string + project: + type: integer + format: int32 + segment: + type: integer + format: int32 + season: + type: string + bend: + type: integer + format: int32 + bendrn: + type: string + bendRiverMile: + type: number + format: float64 + hatcheryOrigin: + type: string + checkedby: + type: string + missouriSummary: + type: object + properties: + uniqueID: + type: integer + format: int32 + year: + type: integer + format: int32 + fieldOffice: + type: string + project: + type: integer + format: int32 + segment: + type: integer + format: int32 + season: + type: string + bend: + type: integer + format: int32 + bendrn: + type: string + bendRiverMile: + type: number + format: float64 + subsample: + type: integer + format: int32 + pass: + type: integer + format: int32 + setDate: + type: string + conductivity: + type: string + checkedby: + type: string + geneticSummary: + type: object + properties: + year: + type: integer + format: int32 + fieldOffice: + type: string + project: + type: integer + format: int32 + sturgeonType: + type: string + GeneticsVialNumber: + type: string + pitTag: + type: string + river: + type: string + riverMile: + type: number + format: float64 + state: + type: string + setDate: + type: string + conductivity: + type: string + checkedby: + type: string + broodstock: + type: string + hatchWild: + type: string + speciesId: + type: string + archive: + type: string + searchSummary: + type: object + properties: + seId: + type: integer + format: int32 + siteId: + type: integer + format: int32 + seFid: + type: string + maxLength: 199 + dsId: + type: integer + format: int32 + siteFid: + type: string + maxLength: 199 + 
searchDate: + type: string + recorder: + type: string + maxLength: 3 + startTime: + type: string + maxLength: 11 + startLatitude: + type: number + startLongitude: + type: number + stopTime: + type: string + maxLength: 11 + stopLatitud: + type: number + stopLongitude: + type: number + temp: + type: string + conductivity: + type: string + fishDataEntry: + type: object + properties: + id: + type: integer + format: int32 + mrId: + type: integer + format: int32 + panelhook: + type: string + maxLength: 5 + bait: + type: string + maxLength: 1 + species: + type: string + maxLength: 4 + length: + type: number + weight: + type: number + fishcount: + type: integer + format: int32 + finCurl: + type: string + maxLength: 1 + otolith: + type: string + maxLength: 1 + rayspine: + type: string + maxLength: 1 + scale: + type: string + maxLength: 1 + ftprefix: + type: string + maxLength: 3 + ftnum: + type: string + maxLength: 7 + ftmr: + type: string + maxLength: 1 + uniqueID: + type: integer + format: int32 + upload: + type: object + properties: + editInitials: + type: string + maxLength: 1 + siteUpload: + type: array + items: + $ref: "#/components/schemas/uploadSite" + fishUpload: + type: array + items: + $ref: "#/components/schemas/uploadFish" + telemetryUpload: + type: array + items: + $ref: "#/components/schemas/uploadTelemetry" + searchUpload: + type: array + items: + $ref: "#/components/schemas/uploadSearch" + procedureUpload: + type: array + items: + $ref: "#/components/schemas/uploadProcedure" + supplementalUpload: + type: array + items: + $ref: "#/components/schemas/uploadSupplemental" + moriverUpload: + type: array + items: + $ref: "#/components/schemas/uploadMoriver" + uploadSiteData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadSite" + uploadFishData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: 
"#/components/schemas/uploadFish" + uploadTelemetryData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadTelemetry" + uploadSearchData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadSearch" + uploadProcedureData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadProcedure" + uploadSupplementalData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadSupplemental" + uploadMoriverData: + type: object + properties: + uploadFilename: + type: string + maxLength: 1 + items: + type: array + items: + $ref: "#/components/schemas/uploadMoriver" + procedureOut: + type: object + required: + - uploadSessionId + - uploadBy + properties: + uploadSessionId: + type: integer + format: int32 + uploadBy: + type: string + maxLength: 20 + siteCntFinal: + type: integer + format: int32 + mrCntFinal: + type: integer + format: int32 + fishCntFinal: + type: integer + format: int32 + searchCntFinal: + type: integer + format: int32 + suppCntFinal: + type: integer + format: int32 + telemetryCntFinal: + type: integer + format: int32 + procedureCntFinal: + type: integer + format: int32 + noSiteCnt: + type: integer + format: int32 + siteMatch: type: integer format: int32 + noSiteIDMsg: + type: string uploadSite: type: object required: - siteId - - uploadSessionId - - uploadFilename properties: siteId: type: integer format: int32 siteFid: type: string + maxLength: 50 siteYear: type: integer format: int32 fieldofficeID: type: string + maxLength: 10 fieldOffice: type: string + maxLength: 255 projectId: type: integer format: int32 @@ -129,44 +2494,50 @@ components: format: int32 segment: type: string + maxLength: 255 
seasonId: type: string + maxLength: 5 season: type: string + maxLength: 5 bend: type: integer format: int32 bendrn: type: string + maxLength: 10 bendRiverMile: type: number comments: type: string - uploadSessionId: - type: integer - format: int32 + maxLength: 2000 uploadFilename: type: string + maxLength: 200 uploadFish: type: object required: - siteId - - uploadSessionId - - uploadFilename properties: siteId: type: integer format: int32 fFid: type: string + maxLength: 50 mrFid: type: string + maxLength: 50 panelhook: type: string + maxLength: 5 bait: type: string + maxLength: 1 species: type: string + maxLength: 4 length: type: number weight: @@ -176,25 +2547,535 @@ components: format: int32 finCurl: type: string + maxLength: 1 otolith: type: string + maxLength: 1 rayspine: type: string + maxLength: 1 scale: type: string + maxLength: 1 ftprefix: type: string + maxLength: 3 ftnum: type: string + maxLength: 7 ftmr: type: string + maxLength: 1 comments: type: string - uploadSessionId: + maxLength: 2000 + uploadFilename: + type: string + maxLength: 200 + uploadTelemetry: + type: object + required: + - tFid + properties: + tFid: + type: string + maxLength: 50 + seFid: + type: string + maxLength: 199 + bend: + type: number + radioTagNum: + type: integer + format: int32 + frequencyIdCode: + type: integer + format: int32 + captureTime: + type: string + maxLength: 9 + captureLatitude: + type: number + captureLongitude: + type: number + positionConfidence: + type: number + macroId: + type: string + maxLength: 10 + mesoId: + type: string + maxLength: 10 + depth: + type: number + temp: + type: number + conductivity: + type: number + turbidity: + type: number + silt: + type: number + sand: + type: number + gravel: + type: number + comments: + type: string + maxLength: 20 + uploadFilename: + type: string + maxLength: 200 + uploadSearch: + type: object + required: + - siteId + properties: + siteId: + type: integer + format: int32 + seFid: + type: string + maxLength: 199 + dsId: 
+ type: integer + format: int32 + siteFid: + type: string + maxLength: 199 + searchDate: + type: string + recorder: + type: string + maxLength: 3 + searchTypeCode: + type: string + maxLength: 2 + searchDay: + type: integer + format: int32 + startTime: + type: string + maxLength: 11 + startLatitude: + type: number + startLongitude: + type: number + stopTime: + type: string + maxLength: 11 + stopLatitud: + type: number + stopLongitude: + type: number + temp: + type: number + conductivity: + type: number + uploadFilename: + type: string + maxLength: 200 + uploadProcedure: + type: object + required: + - id + properties: + id: + type: integer + format: int32 + f_fid: + type: string + maxLength: 50 + purposeCode: + type: string + maxLength: 2 + procedurDate: + type: string + procedureStartTime: + type: string + maxLength: 11 + procedureEndTime: + type: string + maxLength: 11 + procedureBy: + type: string + maxLength: 3 + antibioticInjectionInd: + type: integer + format: int32 + photoDorsalInd: + type: integer + format: int32 + photoVentralInd: + type: integer + format: int32 + photoLeftInd: + type: integer + format: int32 + oldRadioTagNum: + type: integer + format: int32 + oldFrequencyId: + type: integer + format: int32 + dstSerialNum: + type: integer + format: int32 + dstStartDate: + type: string + dstStartTime: + type: string + dstReimplantInd: + type: integer + format: int32 + newRadioTagNum: + type: integer + format: int32 + newFrequencyId: + type: integer + format: int32 + sexCode: + type: string + maxLength: 1 + bloodSampleInd: + type: integer + format: int32 + eggSampleInd: + type: integer + format: int32 + comments: + type: string + maxLength: 2000 + fishHealthComments: + type: string + maxLength: 2000 + evalLocationCode: + type: string + maxLength: 2 + spawnCode: + type: string + maxLength: 2 + visualReproStatusCode: + type: string + maxLength: 4 + ultrasoundReproStatusCode: + type: string + maxLength: 4 + expectedSpawnYear: + type: integer + format: int32 + 
ltrasoundGonadLength: + type: number + gonadCondition: + type: string + maxLength: 50 + uploadedBy: + type: string + maxLength: 20 + uploadFilename: + type: string + maxLength: 200 + uploadSupplemental: + type: object + required: + - siteId + properties: + siteId: type: integer format: int32 + fFid: + type: string + maxLength: 50 + mrFid: + type: string + maxLength: 50 + tagnumber: + type: string + maxLength: 20 + pitrn: + type: string + maxLength: 5 + scuteloc: + type: string + maxLength: 1 + scutenum: + type: number + scuteloc2: + type: string + maxLength: 1 + scutenum2: + type: number + maxLength: 1 + elhv: + type: string + maxLength: 1 + elcolor: + type: string + maxLength: 1 + erhv: + type: string + maxLength: 1 + ercolor: + type: string + maxLength: 1 + cwtyn: + type: string + maxLength: 1 + dangler: + type: string + maxLength: 1 + genetic: + type: string + maxLength: 1 + geneticsVialNumber: + type: string + maxLength: 20 + broodstock: + type: number + hatchWild: + type: number + speciesId: + type: integer + format: int32 + archive: + type: integer + format: int32 + head: + type: number + snouttomouth: + type: number + inter: + type: number + outhwidth: + type: number + mIb: + type: number + lOb: + type: number + lIb: + type: number + rIb: + type: number + rOb: + type: number + anal: + type: number + orsal: + type: number + status: + type: string + hatcheryOrigin: + type: string + maxLength: 1 + sex: + type: string + maxLength: 5 + stage: + type: string + maxLength: 5 + recapture: + type: string + maxLength: 1 + photo: + type: string + maxLength: 1 + geneticNeeds: + type: string + maxLength: 100 + otherTagInfo: + type: string + maxLength: 500 + comments: + type: string + maxLength: 20 + uploadBy: + type: string + maxLength: 20 + uploadFilename: + type: string + maxLength: 200 + uploadMoriver: + type: object + required: + - siteId + properties: + siteId: + type: integer + format: int32 + siteFid: + type: string + maxLength: 50 + mrFid: + type: string + 
maxLength: 50 + season: + type: string + maxLength: 5 + setdate: + type: string + subsample: + type: number + subsamplepass: + type: number + subsamplen: + type: string + maxLength: 1 + recorder: + type: string + maxLength: 3 + gear: + type: string + maxLength: 10 + gearType: + type: string + maxLength: 1 + temp: + type: number + turbidity: + type: number + conductivity: + type: number + do: + type: number + distance: + type: number + width: + type: number + netrivermile: + type: number + structurenumber: + type: string + maxLength: 10 + usgs: + type: string + maxLength: 8 + riverstage: + type: number + discharge: + type: number + u1: + type: string + maxLength: 3 + u2: + type: string + maxLength: 3 + u3: + type: string + maxLength: 4 + u4: + type: string + maxLength: 4 + u5: + type: string + maxLength: 6 + u6: + type: string + maxLength: 6 + u7: + type: string + maxLength: 8 + macro: + type: string + maxLength: 4 + meso: + type: string + maxLength: 5 + habitatrn: + type: string + maxLength: 1 + qc: + type: string + maxLength: 1 + microStructure: + type: string + maxLength: 5 + structureFlow: + type: string + maxLength: 1 + structureMod: + type: string + maxLength: 1 + setSite_1: + type: string + maxLength: 5 + setSite_2: + type: string + maxLength: 1 + setSite_3: + type: string + maxLength: 1 + startTime: + type: string + maxLength: 11 + startLatitude: + type: number + startLongitude: + type: number + stopTime: + type: string + maxLength: 11 + stopLatitude: + type: number + stopLongitude: + type: number + depth1: + type: number + velocitybot1: + type: number + velocity08_1: + type: number + velocity02or06_1: + type: number + depth2: + type: number + velocitybot2: + type: number + velocity08_2: + type: number + velocity02or06_2: + type: number + depth3: + type: number + velocitybot3: + type: number + velocity08_3: + type: number + velocity02or06_3: + type: number + watervel: + type: number + cobble: + type: number + organic: + type: number + silt: + type: number + 
sand: + type: number + gravel: + type: number + comments: + type: string + maxLength: 2000 + complete: + type: number + checkby: + type: string + maxLength: 3 + noTurbidity: + type: string + maxLength: 1 + noVelocity: + type: string + maxLength: 1 uploadFilename: type: string + maxLength: 200 + errorCount: + year: + type: number + count: + type: number Error: type: object required: diff --git a/example.envars.list b/example.envars.list new file mode 100644 index 0000000..b48019d --- /dev/null +++ b/example.envars.list @@ -0,0 +1,7 @@ +DB_USER=PSPA +DB_PASS=ADD_PASWORD +DB_NAME=MAE1DEV +DB_HOST=host.docker.internal +DB_PORT=1522 +LIB_DIR=/usr/lib/instantclient +IPPK=${workspaceFolder}/props/local.pem \ No newline at end of file diff --git a/go.mod b/go.mod index ccb6b13..2b0ce7a 100644 --- a/go.mod +++ b/go.mod @@ -1,15 +1,38 @@ -module di2e.net/cwbi/pallid_sturgeon_api/server +module github.com/USACE/pallid_sturgeon_api/server -go 1.15 +go 1.23.2 require ( - github.com/dgrijalva/jwt-go v3.2.0+incompatible - github.com/godror/godror v0.25.1 - github.com/google/uuid v1.2.0 - github.com/jackc/pgx v3.6.2+incompatible - github.com/jmoiron/sqlx v1.2.0 - github.com/labstack/echo/v4 v4.1.17 - github.com/lib/pq v1.0.0 + github.com/godror/godror v0.44.8 + github.com/golang-jwt/jwt/v4 v4.5.0 + github.com/jackc/pgx/v4 v4.18.3 + github.com/jmoiron/sqlx v1.4.0 + github.com/labstack/echo/v4 v4.12.0 +) + +require ( + github.com/go-logfmt/logfmt v0.6.0 // indirect + github.com/godror/knownpb v0.2.0 // indirect + github.com/golang-jwt/jwt v3.2.2+incompatible // indirect + github.com/jackc/chunkreader/v2 v2.0.1 // indirect + github.com/jackc/pgconn v1.14.3 // indirect + github.com/jackc/pgio v1.0.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgproto3/v2 v2.3.3 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/pgtype v1.14.3 // indirect + github.com/labstack/gommon v0.4.2 // indirect + 
github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/pkg/errors v0.9.1 // indirect - golang.org/x/sys v0.0.0-20200828194041-157a740278f4 // indirect + github.com/planetscale/vtprotobuf v0.6.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.2 // indirect + golang.org/x/crypto v0.28.0 // indirect + golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect + golang.org/x/net v0.30.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect + golang.org/x/time v0.7.0 // indirect + google.golang.org/protobuf v1.35.1 // indirect ) diff --git a/go.sum b/go.sum index 9e4ee1c..53bacbf 100644 --- a/go.sum +++ b/go.sum @@ -1,97 +1,276 @@ -github.com/auth0/go-jwt-middleware v1.0.0 h1:76t55qLQu3xjMFbkirbSCA8ZPcO1ny+20Uq1wkSTRDE= -github.com/auth0/go-jwt-middleware v1.0.0/go.mod h1:nX2S0GmCyl087kdNSSItfOvMYokq5PSTG1yGIP5Le4U= -github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/UNO-SOFT/zlog v0.8.1 h1:TEFkGJHtUfTRgMkLZiAjLSHALjwSBdw6/zByMC5GJt4= +github.com/UNO-SOFT/zlog v0.8.1/go.mod h1:yqFOjn3OhvJ4j7ArJqQNA+9V+u6t9zSAyIZdWdMweWc= +github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod 
h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= -github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ= -github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/go-logfmt/logfmt v0.5.0 h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= -github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/godror/godror v0.25.1 h1:g1sf7nfzhoz/ep00LAYcIRMnJmcC2g4DTf/xwjWHR+8= -github.com/godror/godror v0.25.1/go.mod h1:wZv/9vPiUib6tkoDl+AZ/QLf5YZgMravZ7jxH2eQWAE= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/uuid v1.2.0 
h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= -github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= -github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/jackc/pgx v3.6.2+incompatible h1:2zP5OD7kiyR3xzRYMhOcXVvkDZsImVXfj+yIyTQf3/o= -github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= -github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA= -github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/kortschak/utter v1.0.1/go.mod h1:vSmSjbyrlKjjsL71193LmzBOKgwePk9DH6uFaWHIInc= -github.com/labstack/echo/v4 v4.1.17 h1:PQIBaRplyRy3OjwILGkPg89JRtH2x5bssi59G2EL3fo= -github.com/labstack/echo/v4 v4.1.17/go.mod h1:Tn2yRQL/UclUalpb5rPdXDevbkJ+lp/2svdyFBg6CHQ= -github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= -github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= -github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A= +github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4= +github.com/go-logfmt/logfmt v0.6.0/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v1.2.4 
h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/godror/godror v0.44.8 h1:20AAK8BWZasXuRkX/vhbSpnAqBMXB9fngsdfMJ4pNgU= +github.com/godror/godror v0.44.8/go.mod h1:KJwMtQpK9o3WdEiNw7qvgSk827YDLj9MV/bXSzvUzlo= +github.com/godror/knownpb v0.2.0 h1:RJLntksFiKUHoUz3wCCJ8+DBjxSLYHYDNl1xRz0/gXI= +github.com/godror/knownpb v0.2.0/go.mod h1:kRahRJBwqTenpVPleymQ4k433Xz2Wuy7dOeFSuEpmkI= +github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= +github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn 
v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= +github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod 
h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= +github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgtype v1.14.3 h1:h6W9cPuHsRWQFTWUZMAKMgG5jSwQI0Zurzdvlx3Plus= +github.com/jackc/pgtype v1.14.3/go.mod h1:aKeozOde08iifGosdJpz9MBZonJOUJxqNpPBcMJTlVA= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 
v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/pgx/v4 v4.18.3 h1:dE2/TrEsGX3RBprb3qryqSV9Y60iZN1C6i8IrmW9/BA= +github.com/jackc/pgx/v4 v4.18.3/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0= +github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM= +github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0= +github.com/labstack/gommon v0.4.2/go.mod 
h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.7 h1:bQGKb3vps/j0E9GfJQ03JyhRuxsvdAanXlT9BTw3mdw= -github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.9 h1:d5US/mDsogSGW37IV293h//ZFaeajb69h+EHFsv2xGg= -github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4= -github.com/mattn/go-sqlite3 v1.9.0/go.mod 
h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/oklog/ulid/v2 v2.0.2 h1:r4fFzBm+bv0wNKNh5eXTwU7i85y5x+uwkxCUTNVQqLc= +github.com/oklog/ulid/v2 v2.0.2/go.mod h1:mtBL0Qe/0HAx6/a4Z30qxVIAL1eQDweXq5lxOEiwQ68= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/planetscale/vtprotobuf v0.6.0 h1:nBeETjudeJ5ZgBHUz1fVHvbqUKnYOXNhsIEabROxmNA= +github.com/planetscale/vtprotobuf v0.6.0/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= -github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod 
h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 
h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= -github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= -github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo= +github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 
+golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a h1:vclmkQCjlDX5OydZ9wv8rBCcS0QyQY66Mpf/7BZbInM= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod 
v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20200822124328-c89045814202 h1:VvcQYSHwXgi7W+TpUR6A9g6Up98WAHf3f/ulnJ62IyA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200828194041-157a740278f4 h1:kCCpuwSAoYJPkNc6x0xT9yTtV4oKtARo4RGBQWOfg9E= -golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= +golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod 
h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/time v0.7.0 h1:ntUhktv3OPE6TgYxXWv9vKvUSJyIFJlyohwbkEwPrKQ= +golang.org/x/time v0.7.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools 
v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= diff --git a/handlers/handler-utils.go b/handlers/handler-utils.go index 1cf30f7..7f4f8bc 100644 --- a/handlers/handler-utils.go +++ 
b/handlers/handler-utils.go @@ -1,100 +1,100 @@ package handlers import ( - // "strconv" - // "strings" + "strconv" + "strings" + "time" - // "di2e.net/cwbi/pallid_sturgeon_api/server/models" - // "github.com/labstack/echo/v4" + "github.com/USACE/pallid_sturgeon_api/server/models" + "github.com/labstack/echo/v4" ) -// func marshalQuery(c echo.Context) (models.SearchParams, error) { -// var page int = 0 -// var size int = 100 -// var orderby string = "id" -// var filter string = "" -// var phaseType string = "" -// var phaseStatus string = "" -// var err error - -// pageStr := c.QueryParam("page") - -// if pageStr != "" { -// page, err = strconv.Atoi(pageStr) -// if err != nil { -// return models.SearchParams{}, err -// } -// } - -// sizeStr := c.QueryParam("size") - -// if sizeStr != "" { -// size, err = strconv.Atoi(sizeStr) -// if err != nil { -// return models.SearchParams{}, err -// } -// } - -// ordebyString := c.QueryParam("orderby") - -// if ordebyString != "" && ordebyString != "undefined" { -// orderby = ordebyString -// } - -// filterStr := c.QueryParam("filter") - -// if filterStr != "" { -// filter = filterStr -// if strings.Contains(filterStr, "project_type") { -// filter = strings.ReplaceAll(filter, "project_type", "projt.type_name") -// } -// if strings.Contains(filterStr, "id ILIKE") { -// filter = strings.ReplaceAll(filter, "id ILIKE", "CAST(p.id AS TEXT) ILIKE") -// } -// if strings.Contains(filterStr, "id IN") { -// filter = strings.ReplaceAll(filter, "id IN", "p.id IN") -// } -// if strings.Contains(filterStr, "fiscal_year ILIKE") { -// filter = strings.ReplaceAll(filter, "fiscal_year ILIKE", "CAST(fiscal_year AS TEXT) ILIKE") -// } -// if strings.Contains(filterStr, "project_type") { -// filter = strings.ReplaceAll(filter, "project_type", "projt.type_name") -// } -// if strings.Contains(filterStr, "phase ILIKE") { -// filter = strings.ReplaceAll(filter, "phase ILIKE", "pht.phase_name ILIKE") -// } -// if strings.Contains(filterStr, "phase IN") 
{ -// filter = strings.ReplaceAll(filter, "phase IN", "pht.phase_name IN") -// } -// if strings.Contains(filterStr, "project_manager") { -// filter = strings.ReplaceAll(filter, "project_manager", "pl.user_name") -// } -// if strings.Contains(filterStr, "lead_engineer") { -// filter = strings.ReplaceAll(filter, "lead_engineer", "le.user_name") -// } -// if strings.Contains(filterStr, "current_task") { -// filter = strings.ReplaceAll(filter, "current_task", "b.task_name") -// } -// if strings.Contains(filterStr, "percent_complete ILIKE") { -// filter = strings.ReplaceAll(filter, "percent_complete ILIKE", "CAST(percent_complete AS TEXT) ILIKE") -// } -// if strings.Contains(filterStr, "slippage ILIKE") { -// filter = strings.ReplaceAll(filter, "slippage ILIKE", "CAST((b.actual_date - b.projected_end_date) AS TEXT) ILIKE") -// } -// if strings.Contains(filterStr, "slippage IN") { -// filter = strings.ReplaceAll(filter, "slippage IN", "(b.actual_date - b.projected_end_date) IN") -// } -// } - -// phaseType = c.QueryParam("phaseType") -// phaseStatus = c.QueryParam("phaseStatus") - -// return models.SearchParams{ -// Page: page, -// PageSize: size, -// OrderBy: orderby, -// Filter: filter, -// PhaseType: phaseType, -// PhaseStatus: phaseStatus, -// }, nil -// } +func processTimeString(st string) time.Time { + t := time.Time{} + if len(st) > 0 { + if !strings.HasPrefix(st, "1") { + st = "0" + st + } + test, err := time.Parse("01/02/2006", st) + if err != nil { + t = time.Time{} + } + t = test + } + + return t +} + +func processStringTime(st string, ty string) *string { + t := new(string) + + if len(st) > 0 { + f := "1/2/2006" + + if ty == "app" { + f = "2006-01-02" + } + + test, err := time.Parse(f, st) + + if err != nil { + *t = "" + } else { + *t = test.Format("02-Jan-2006") + } + } + return t +} + +func DerefString(s *string) string { + if s != nil { + return *s + } + + return "" +} + +func marshalQuery(c echo.Context) (models.SearchParams, error) { + var page int = 0 
+ var size int = 20 + var orderby string = "" + var filter string = "" + var err error + + pageStr := c.QueryParam("page") + + if pageStr != "" { + page, err = strconv.Atoi(pageStr) + if err != nil { + return models.SearchParams{}, err + } + } + + sizeStr := c.QueryParam("size") + + if sizeStr != "" { + size, err = strconv.Atoi(sizeStr) + if err != nil { + return models.SearchParams{}, err + } + } + + orderbyString := c.QueryParam("orderby") + + if orderbyString != "" && orderbyString != "undefined" { + orderby = orderbyString + } + + filterString := c.QueryParam("filter") + + if filterString != "" && filterString != "undefined" { + filter = filterString + } + + return models.SearchParams{ + Page: page, + PageSize: size, + OrderBy: orderby, + Filter: filter, + }, nil +} diff --git a/handlers/pallidsturgeonhandler.go b/handlers/pallidsturgeonhandler.go index a17ca47..3e1b42c 100644 --- a/handlers/pallidsturgeonhandler.go +++ b/handlers/pallidsturgeonhandler.go @@ -2,10 +2,12 @@ package handlers import ( "net/http" + "os" + "strconv" "time" - "di2e.net/cwbi/pallid_sturgeon_api/server/models" - "di2e.net/cwbi/pallid_sturgeon_api/server/stores" + "github.com/USACE/pallid_sturgeon_api/server/models" + "github.com/USACE/pallid_sturgeon_api/server/stores" "github.com/labstack/echo/v4" ) @@ -14,209 +16,1270 @@ type PallidSturgeonHandler struct { } func (ps *PallidSturgeonHandler) Version(c echo.Context) error { - return c.String(http.StatusOK, "Pallid Sturgeon API v0.01") + return c.String(http.StatusOK, "Pallid Sturgeon API v0.02") +} + +func (sd *PallidSturgeonHandler) GetProjects(c echo.Context) error { + id := c.QueryParam("id") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + projects, err := sd.Store.GetProjects(userInfo.OfficeCode) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, projects) +} + +func (sd 
*PallidSturgeonHandler) GetProjectsFilter(c echo.Context) error { + project := c.QueryParam("project") + + projects, err := sd.Store.GetProjectsFilter(project) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, projects) +} + +func (sd *PallidSturgeonHandler) GetRoles(c echo.Context) error { + roles, err := sd.Store.GetRoles() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, roles) +} + +func (sd *PallidSturgeonHandler) GetFieldOffices(c echo.Context) error { + showAll := c.QueryParam("showAll") + fieldOffices, err := sd.Store.GetFieldOffices(showAll) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, fieldOffices) } func (sd *PallidSturgeonHandler) GetSeasons(c echo.Context) error { - seasons, err := sd.Store.GetSeasons() + year, office, project := c.QueryParam("year"), c.QueryParam("office"), c.QueryParam("project") + seasons, err := sd.Store.GetSeasons(year, office, project) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, seasons) +} + +func (sd *PallidSturgeonHandler) GetSampleUnitTypes(c echo.Context) error { + sampleUnitTypes, err := sd.Store.GetSampleUnitTypes() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, sampleUnitTypes) +} + +func (sd *PallidSturgeonHandler) GetSegments(c echo.Context) error { + office, project := c.QueryParam("office"), c.QueryParam("project") + segments, err := sd.Store.GetSegments(office, project) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, segments) +} + +func (sd *PallidSturgeonHandler) GetSampleUnit(c echo.Context) error { + sampleUnitType, segment := c.QueryParam("sampleUnitType"), c.QueryParam("segment") + bends, err := 
sd.Store.GetSampleUnit(sampleUnitType, segment) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, bends) +} + +func (sd *PallidSturgeonHandler) GetBendRn(c echo.Context) error { + bends, err := sd.Store.GetBendRn() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, bends) +} + +func (sd *PallidSturgeonHandler) GetMeso(c echo.Context) error { + macro := c.QueryParam("macro") + mesoItems, err := sd.Store.GetMeso(macro) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, mesoItems) +} + +func (sd *PallidSturgeonHandler) GetStructureFlow(c echo.Context) error { + microStructure := c.QueryParam("microStructure") + structureFlowItems, err := sd.Store.GetStructureFlow(microStructure) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, structureFlowItems) +} + +func (sd *PallidSturgeonHandler) GetStructureMod(c echo.Context) error { + structureFlow := c.QueryParam("structureFlow") + structureModItems, err := sd.Store.GetStructureMod(structureFlow) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, structureModItems) +} + +func (sd *PallidSturgeonHandler) GetSpecies(c echo.Context) error { + species, err := sd.Store.GetSpecies() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, species) +} + +func (sd *PallidSturgeonHandler) GetFtPrefixes(c echo.Context) error { + ftPrefixes, err := sd.Store.GetFtPrefixes() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, ftPrefixes) +} + +func (sd *PallidSturgeonHandler) GetMr(c echo.Context) error { + mr, err := sd.Store.GetMr() + if err != nil { + return c.JSON(http.StatusInternalServerError, 
err.Error()) + } + return c.JSON(http.StatusOK, mr) +} + +func (sd *PallidSturgeonHandler) GetOtolith(c echo.Context) error { + otolith, err := sd.Store.GetOtolith() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, otolith) +} + +func (sd *PallidSturgeonHandler) GetSetSite1(c echo.Context) error { + microstructure := c.QueryParam("microstructure") + setSiteItems, err := sd.Store.GetSetSite1(microstructure) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, setSiteItems) +} + +func (sd *PallidSturgeonHandler) GetSetSite2(c echo.Context) error { + setsite1 := c.QueryParam("setsite1") + setSiteItems, err := sd.Store.GetSetSite2(setsite1) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, setSiteItems) +} + +func (sd *PallidSturgeonHandler) GetYears(c echo.Context) error { + year, err := sd.Store.GetYears() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, year) +} + +func (sd *PallidSturgeonHandler) GetSiteDataEntries(c echo.Context) error { + id, year, projectCode, segmentCode, seasonCode, bendrn, siteId := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("segmentCode"), c.QueryParam("seasonCode"), c.QueryParam("bendrn"), c.QueryParam("siteId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + siteDataEntries, err := sd.Store.GetSiteDataEntries(siteId, year, userInfo.OfficeCode, projectCode, segmentCode, seasonCode, bendrn, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, siteDataEntries) +} + +func (sd 
*PallidSturgeonHandler) SaveSiteDataEntry(c echo.Context) error { + code, sampleUnitType, segment := c.QueryParam("code"), c.QueryParam("sampleUnitType"), c.QueryParam("segment") + siteData := models.Sites{} + if err := c.Bind(&siteData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + siteData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + siteData.UploadedBy = user.FirstName + " " + user.LastName + id, err := sd.Store.SaveSiteDataEntry(code, sampleUnitType, segment, siteData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateSiteDataEntry(c echo.Context) error { + + siteData := models.Sites{} + if err := c.Bind(&siteData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + siteData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + siteData.UploadedBy = user.FirstName + " " + user.LastName + err := sd.Store.UpdateSiteDataEntry(siteData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetFishDataEntries(c echo.Context) error { + id, tableId, fieldId, mrId := c.QueryParam("id"), c.QueryParam("tableId"), c.QueryParam("fieldId"), c.QueryParam("mrId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetFishDataEntries(tableId, fieldId, mrId, userInfo.OfficeCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) SaveFishDataEntry(c echo.Context) error { + fishData := 
models.UploadFish{} + if err := c.Bind(&fishData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + fishData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + fishData.UploadedBy = user.FirstName + " " + user.LastName + id, err := sd.Store.SaveFishDataEntry(fishData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateFishDataEntry(c echo.Context) error { + + fishData := models.UploadFish{} + if err := c.Bind(&fishData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + fishData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + fishData.UploadedBy = user.FirstName + " " + user.LastName + err := sd.Store.UpdateFishDataEntry(fishData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) DeleteFishDataEntry(c echo.Context) error { + id := c.Param("id") + + err := sd.Store.DeleteFishDataEntry(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetMoriverDataEntries(c echo.Context) error { + tableId, fieldId := c.QueryParam("tableId"), c.QueryParam("fieldId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + user := c.Get("PSUSER").(models.User) + + userInfo, err := sd.Store.GetUser(user.Email) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetMoriverDataEntries(tableId, fieldId, userInfo.OfficeCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) 
SaveMoriverDataEntry(c echo.Context) error { + moriverData := models.UploadMoriver{} + if err := c.Bind(&moriverData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + moriverData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + moriverData.UploadedBy = user.FirstName + " " + user.LastName + moriverData.SetDate = processStringTime(DerefString(moriverData.SetDate), "app") + id, err := sd.Store.SaveMoriverDataEntry(moriverData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateMoriverDataEntry(c echo.Context) error { + moriverData := models.UploadMoriver{} + if err := c.Bind(&moriverData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + moriverData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + moriverData.UploadedBy = user.FirstName + " " + user.LastName + moriverData.SetDate = processStringTime(DerefString(moriverData.SetDate), "app") + err := sd.Store.UpdateMoriverDataEntry(moriverData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetSupplementalDataEntries(c echo.Context) error { + id, tableId, fieldId, geneticsVial, pitTag, mrId, fId := c.QueryParam("id"), c.QueryParam("tableId"), c.QueryParam("fieldId"), c.QueryParam("geneticsVial"), c.QueryParam("pitTag"), c.QueryParam("mrId"), c.QueryParam("fId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetSupplementalDataEntries(tableId, fieldId, geneticsVial, pitTag, mrId, fId, userInfo.OfficeCode, queryParams) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) SaveSupplementalDataEntry(c echo.Context) error { + supplementalData := models.UploadSupplemental{} + if err := c.Bind(&supplementalData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + supplementalData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + supplementalData.UploadedBy = user.FirstName + " " + user.LastName + id, err := sd.Store.SaveSupplementalDataEntry(supplementalData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateSupplementalDataEntry(c echo.Context) error { + supplementalData := models.UploadSupplemental{} + if err := c.Bind(&supplementalData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + supplementalData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + supplementalData.UploadedBy = user.FirstName + " " + user.LastName + err := sd.Store.UpdateSupplementalDataEntry(supplementalData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) DeleteSupplementalDataEntry(c echo.Context) error { + id := c.Param("id") + + err := sd.Store.DeleteSupplementalDataEntry(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetSearchDataEntries(c echo.Context) error { + tableId, siteId := c.QueryParam("tableId"), c.QueryParam("siteId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetSearchDataEntries(tableId, siteId, queryParams) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) SaveSearchDataEntry(c echo.Context) error { + searchData := models.UploadSearch{} + if err := c.Bind(&searchData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + searchData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + searchData.UploadedBy = user.FirstName + " " + user.LastName + searchData.SearchDate = processStringTime(DerefString(searchData.SearchDate), "app") + id, err := sd.Store.SaveSearchDataEntry(searchData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateSearchDataEntry(c echo.Context) error { + searchData := models.UploadSearch{} + if err := c.Bind(&searchData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + searchData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + searchData.UploadedBy = user.FirstName + " " + user.LastName + searchData.SearchDate = processStringTime(DerefString(searchData.SearchDate), "app") + err := sd.Store.UpdateSearchDataEntry(searchData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetProcedureDataEntries(c echo.Context) error { + id, tableId, fId, mrId := c.QueryParam("id"), c.QueryParam("tableId"), c.QueryParam("fId"), c.QueryParam("mrId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetProcedureDataEntries(tableId, fId, mrId, userInfo.OfficeCode, queryParams) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) SaveProcedureDataEntry(c echo.Context) error { + procedureData := models.UploadProcedure{} + if err := c.Bind(&procedureData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + procedureData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + procedureData.UploadedBy = user.FirstName + " " + user.LastName + procedureData.ProcedureDate = processStringTime(DerefString(procedureData.ProcedureDate), "app") + procedureData.DstStartDate = processStringTime(DerefString(procedureData.DstStartDate), "app") + id, err := sd.Store.SaveProcedureDataEntry(procedureData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateProcedureDataEntry(c echo.Context) error { + procedureData := models.UploadProcedure{} + if err := c.Bind(&procedureData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + procedureData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + procedureData.UploadedBy = user.FirstName + " " + user.LastName + procedureData.ProcedureDate = processStringTime(DerefString(procedureData.ProcedureDate), "app") + procedureData.DstStartDate = processStringTime(DerefString(procedureData.DstStartDate), "app") + err := sd.Store.UpdateProcedureDataEntry(procedureData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) DeleteProcedureDataEntry(c echo.Context) error { + id := c.Param("id") + + err := sd.Store.DeleteProcedureDataEntry(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, "successfully deleted procedure data entry id "+id) +} + +func (sd 
*PallidSturgeonHandler) GetTelemetryDataEntries(c echo.Context) error { + id, tableId, seId := c.QueryParam("id"), c.QueryParam("tableId"), c.QueryParam("seId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + dataSummary, err := sd.Store.GetTelemetryDataEntries(tableId, seId, userInfo.OfficeCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) SaveTelemetryDataEntry(c echo.Context) error { + telemetryData := models.UploadTelemetry{} + if err := c.Bind(&telemetryData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + telemetryData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + telemetryData.UploadedBy = user.FirstName + " " + user.LastName + id, err := sd.Store.SaveTelemetryDataEntry(telemetryData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(200, id) +} + +func (sd *PallidSturgeonHandler) UpdateTelemetryDataEntry(c echo.Context) error { + telemetryData := models.UploadTelemetry{} + if err := c.Bind(&telemetryData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + telemetryData.LastUpdated = time.Now() + user := c.Get("PSUSER").(models.User) + telemetryData.UploadedBy = user.FirstName + " " + user.LastName + err := sd.Store.UpdateTelemetryDataEntry(telemetryData) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) DeleteTelemetryDataEntry(c echo.Context) error { + id := c.Param("id") + + err := sd.Store.DeleteTelemetryDataEntry(id) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, `{"result":"success"}`) +} + +func (sd *PallidSturgeonHandler) GetFullFishDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullFishDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetFishDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetFishDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate, queryParams) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullSuppDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullSuppDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetSuppDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, seasons) + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetSuppDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate, queryParams) + if err != nil { + 
return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullMissouriDataSummary(c echo.Context) error { + id, project, year, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("project"), c.QueryParam("year"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullMissouriDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetMissouriDataSummary(c echo.Context) error { + id, project, year, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("project"), c.QueryParam("year"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetMissouriDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate, queryParams) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullGeneticDataSummary(c echo.Context) error { + id, year, project, fromDate, toDate, broodstock, hatchwild, speciesId, archive := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("fromDate"), c.QueryParam("toDate"), c.QueryParam("broodstock"), c.QueryParam("hatchwild"), c.QueryParam("speciesId"), c.QueryParam("archive") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullGeneticDataSummary(year, userInfo.OfficeCode, projectVal, fromDate, toDate, broodstock, hatchwild, speciesId, archive) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetGeneticDataSummary(c echo.Context) error { + id, year, project, fromDate, toDate, broodstock, hatchwild, speciesId, archive := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("fromDate"), c.QueryParam("toDate"), c.QueryParam("broodstock"), c.QueryParam("hatchwild"), c.QueryParam("speciesId"), c.QueryParam("archive") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetGeneticDataSummary(year, userInfo.OfficeCode, projectVal, fromDate, toDate, broodstock, hatchwild, speciesId, archive, 
queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullSearchDataSummary(c echo.Context) error { + id, year, project, approved, season, segment, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("segment"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullSearchDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, segment, month, fromDate, toDate) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetSearchDataSummary(c echo.Context) error { + id, year, project, approved, season, segment, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("segment"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetSearchDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, segment, month, fromDate, toDate, queryParams) + if err != 
nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetTelemetryDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetTelemetryDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullTelemetryDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullTelemetryDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetProcedureDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + dataSummary, err := sd.Store.GetProcedureDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, dataSummary) +} + +func (sd *PallidSturgeonHandler) GetFullProcedureDataSummary(c echo.Context) error { + id, year, project, approved, season, spice, month, fromDate, toDate := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("project"), c.QueryParam("approved"), c.QueryParam("season"), c.QueryParam("spice"), c.QueryParam("month"), c.QueryParam("fromDate"), c.QueryParam("toDate") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + fileName, err := sd.Store.GetFullProcedureDataSummary(year, userInfo.OfficeCode, projectVal, approved, season, spice, month, fromDate, toDate) + if err != nil { + return 
c.JSON(http.StatusInternalServerError, err.Error()) + } + defer os.Remove(fileName) + return c.Inline(fileName, fileName) +} + +func (sd *PallidSturgeonHandler) GetMissouriDatasheetById(c echo.Context) error { + id, siteId, project, segment, season, bend := c.QueryParam("id"), c.QueryParam("siteId"), c.QueryParam("project"), c.QueryParam("segment"), c.QueryParam("season"), c.QueryParam("bend") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + // set project + projectVal := "" + if userInfo.ProjectCode == "2" { + projectVal = "2" + } else { + projectVal = project + } + + missouriData, err := sd.Store.GetMissouriDatasheetById(siteId, userInfo.OfficeCode, projectVal, segment, season, bend, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, missouriData) +} + +func (sd *PallidSturgeonHandler) GetSearchDatasheetById(c echo.Context) error { + siteId := c.QueryParam("siteId") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + searchData, err := sd.Store.GetSearchDatasheetById(siteId, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, searchData) +} + +func (sd *PallidSturgeonHandler) GetUploadSessionId(c echo.Context) error { + sessionId, err := sd.Store.GetUploadSessionId() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, sessionId) } -func (sd *PallidSturgeonHandler) SiteUpload(c echo.Context) error { +func (sd *PallidSturgeonHandler) Upload(c echo.Context) error { var err error - uploadSites := []models.UploadSite{} - if err := c.Bind(&uploadSites); err != nil { - return err + 
uploads := models.Upload{} + if err := c.Bind(&uploads); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + sessionId, err := sd.Store.GetUploadSessionId() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - for _, uploadSite := range uploadSites { + + user := c.Get("PSUSER").(models.User) + + for _, uploadSite := range uploads.SiteUpload.Items { uploadSite.LastUpdated = time.Now() - uploadSite.UploadedBy = "DeeLiang" + uploadSite.UploadedBy = user.FirstName + " " + user.LastName + uploadSite.UploadSessionId = sessionId + uploadSite.EditInitials = uploads.EditInitials + uploadSite.UploadFilename = uploads.SiteUpload.UploadFilename err = sd.Store.SaveSiteUpload(uploadSite) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } + } - err = sd.Store.UploadSiteDatasheetCheck(uploadSite.UploadedBy, uploadSite.UploadSessionId) + for _, uploadFish := range uploads.FishUpload.Items { + uploadFish.LastUpdated = time.Now() + uploadFish.UploadedBy = user.FirstName + " " + user.LastName + uploadFish.UploadSessionId = sessionId + uploadFish.EditInitials = uploads.EditInitials + uploadFish.UploadFilename = uploads.FishUpload.UploadFilename + err = sd.Store.SaveFishUpload(uploadFish) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } + } - err = sd.Store.UploadSiteDatasheet(uploadSite.UploadedBy) + for _, uploadSearch := range uploads.SearchUpload.Items { + uploadSearch.SearchDate = processStringTime(DerefString(uploadSearch.SearchDate), "db") + uploadSearch.LastUpdated = time.Now() + uploadSearch.UploadedBy = user.FirstName + " " + user.LastName + uploadSearch.UploadSessionId = sessionId + uploadSearch.EditInitials = uploads.EditInitials + uploadSearch.UploadFilename = uploads.SearchUpload.UploadFilename + err = sd.Store.SaveSearchUpload(uploadSearch) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } 
} - return c.JSON(http.StatusOK, `{"result":"success"}`) -} - -func (sd *PallidSturgeonHandler) FishUpload(c echo.Context) error { - var err error - uploadFishs := []models.UploadFish{} - if err := c.Bind(&uploadFishs); err != nil { - return err + for _, uploadSupplemental := range uploads.SupplementalUpload.Items { + uploadSupplemental.LastUpdated = time.Now() + uploadSupplemental.UploadedBy = user.FirstName + " " + user.LastName + uploadSupplemental.UploadSessionId = sessionId + uploadSupplemental.EditInitials = uploads.EditInitials + uploadSupplemental.UploadFilename = uploads.SupplementalUpload.UploadFilename + err = sd.Store.SaveSupplementalUpload(uploadSupplemental) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } } - for _, uploadFish := range uploadFishs { - uploadFish.LastUpdated = time.Now() - uploadFish.UploadedBy = "DeeLiang" - err = sd.Store.SaveFishUpload(uploadFish) + + for _, uploadProcedure := range uploads.ProcedureUpload.Items { + uploadProcedure.ProcedureDate = processStringTime(DerefString(uploadProcedure.ProcedureDate), "db") + uploadProcedure.DstStartDate = processStringTime(DerefString(uploadProcedure.DstStartDate), "db") + uploadProcedure.LastUpdated = time.Now() + uploadProcedure.UploadedBy = user.FirstName + " " + user.LastName + uploadProcedure.UploadSessionId = sessionId + uploadProcedure.EditInitials = uploads.EditInitials + uploadProcedure.UploadFilename = uploads.ProcedureUpload.UploadFilename + err = sd.Store.SaveProcedureUpload(uploadProcedure) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } + } - err = sd.Store.UploadFishDatasheetCheck(uploadFish.UploadedBy, uploadFish.UploadSessionId) + for _, uploadMoriver := range uploads.MoriverUpload.Items { + uploadMoriver.SetDate = processStringTime(DerefString(uploadMoriver.SetDate), "db") + uploadMoriver.LastUpdated = time.Now() + uploadMoriver.UploadedBy = user.FirstName + " " + user.LastName + 
uploadMoriver.UploadSessionId = sessionId + uploadMoriver.EditInitials = uploads.EditInitials + uploadMoriver.UploadFilename = uploads.MoriverUpload.UploadFilename + err = sd.Store.SaveMoriverUpload(uploadMoriver) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } + } - err = sd.Store.UploadFishDatasheet(uploadFish.UploadedBy) + for _, uploadTelemetry := range uploads.TelemetryUpload.Items { + uploadTelemetry.LastUpdated = time.Now() + uploadTelemetry.UploadedBy = user.FirstName + " " + user.LastName + uploadTelemetry.UploadSessionId = sessionId + uploadTelemetry.EditInitials = uploads.EditInitials + uploadTelemetry.UploadFilename = uploads.TelemetryUpload.UploadFilename + err = sd.Store.SaveTelemetryUpload(uploadTelemetry) if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } } - return c.JSON(http.StatusOK, `{"result":"success"}`) + procedureOut, err := sd.Store.CallStoreProcedures(user.FirstName+" "+user.LastName, sessionId) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, procedureOut) } -func (sd *PallidSturgeonHandler) SearchUpload(c echo.Context) error { +func (sd *PallidSturgeonHandler) CallStoreProcedures(c echo.Context) error { var err error - uploadSearches := []models.UploadSearch{} - if err := c.Bind(&uploadSearches); err != nil { - return err + uploadSessionId := c.Param("uploadSessionId") + id, err := strconv.Atoi(uploadSessionId) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - for _, uploadSearch := range uploadSearches { - uploadSearch.LastUpdated = time.Now() - uploadSearch.UploadedBy = "DeeLiang" - err = sd.Store.SaveSearchUpload(uploadSearch) - if err != nil { - return err - } - // err = sd.Store.UploadSearchDatasheetCheck(uploadSearch.UploadedBy, uploadSearch.UploadSessionId) - // if err != nil { - // return err - // } + user := c.Get("PSUSER").(models.User) + 
procedureOut, err := sd.Store.CallStoreProcedures(user.FirstName+" "+user.LastName, id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + return c.JSON(http.StatusOK, procedureOut) +} + +func (sd *PallidSturgeonHandler) GetErrorCount(c echo.Context) error { + id := c.QueryParam("id") - // err = sd.Store.UploadSearchDatasheet(uploadSearch.UploadedBy) - // if err != nil { - // return err - // } + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, `{"result":"success"}`) + errorCounts, err := sd.Store.GetErrorCount(userInfo.OfficeCode) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, errorCounts) } -func (sd *PallidSturgeonHandler) SupplementalUpload(c echo.Context) error { - var err error - uploadSupplementals := []models.UploadSupplemental{} - if err := c.Bind(&uploadSupplementals); err != nil { - return err +func (sd *PallidSturgeonHandler) GetOfficeErrorLogs(c echo.Context) error { + id := c.QueryParam("id") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - for _, uploadSupplemental := range uploadSupplementals { - uploadSupplemental.LastUpdated = time.Now() - uploadSupplemental.UploadedBy = "DeeLiang" - err = sd.Store.SaveSupplementalUpload(uploadSupplemental) - if err != nil { - return err - } - // err = sd.Store.UploadSearchDatasheetCheck(uploadSupplemental.UploadedBy, uploadSupplemental.UploadSessionId) - // if err != nil { - // return err - // } + officeErrorLogs, err := sd.Store.GetOfficeErrorLogs(userInfo.OfficeCode) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, officeErrorLogs) +} + +func (sd *PallidSturgeonHandler) GetUsgNoVialNumbers(c echo.Context) error { + id := c.QueryParam("id") - // err = 
sd.Store.UploadSearchDatasheet(uploadSupplemental.UploadedBy) - // if err != nil { - // return err - // } + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, `{"result":"success"}`) + usgNoVialNumbers, err := sd.Store.GetUsgNoVialNumbers(userInfo.OfficeCode, userInfo.ProjectCode) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, usgNoVialNumbers) } -func (sd *PallidSturgeonHandler) ProcedureUpload(c echo.Context) error { - var err error - uploadProcedures := []models.UploadProcedure{} - if err := c.Bind(&uploadProcedures); err != nil { - return err +func (sd *PallidSturgeonHandler) GetUnapprovedDataSheets(c echo.Context) error { + id := c.QueryParam("id") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - for _, uploadProcedure := range uploadProcedures { - uploadProcedure.LastUpdated = time.Now() - uploadProcedure.UploadedBy = "DeeLiang" - err = sd.Store.SaveProcedureUpload(uploadProcedure) - if err != nil { - return err - } - // err = sd.Store.UploadSearchDatasheetCheck(uploadProcedure.UploadedBy, uploadProcedure.UploadSessionId) - // if err != nil { - // return err - // } + unapprovedDataSheets, err := sd.Store.GetUnapprovedDataSheets(userInfo.ProjectCode, userInfo.OfficeCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, unapprovedDataSheets) +} - // err = sd.Store.UploadSearchDatasheet(uploadProcedure.UploadedBy) - // if err != nil { - // return err - // } +func (sd *PallidSturgeonHandler) GetBafiDataSheets(c echo.Context) error { + id := c.QueryParam("id") + queryParams, err := marshalQuery(c) + if err != nil { 
+ return c.JSON(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, `{"result":"success"}`) + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + bafiDataSheets, err := sd.Store.GetBafiDataSheets(userInfo.OfficeCode, userInfo.ProjectCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, bafiDataSheets) } -func (sd *PallidSturgeonHandler) MrUpload(c echo.Context) error { - var err error - uploadMrs := []models.UploadMr{} - if err := c.Bind(&uploadMrs); err != nil { - return err +func (sd *PallidSturgeonHandler) GetUncheckedDataSheets(c echo.Context) error { + id := c.QueryParam("id") + queryParams, err := marshalQuery(c) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - for _, uploadMr := range uploadMrs { - uploadMr.LastUpdated = time.Now() - uploadMr.UploadedBy = "DeeLiang" - err = sd.Store.SaveMrUpload(uploadMr) - if err != nil { - return err - } - // err = sd.Store.UploadSearchDatasheetCheck(uploadMr.UploadedBy, uploadMr.UploadSessionId) - // if err != nil { - // return err - // } + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } - // err = sd.Store.UploadSearchDatasheet(uploadMr.UploadedBy) - // if err != nil { - // return err - // } + uncheckedDataSheets, err := sd.Store.GetUncheckedDataSheets(userInfo.OfficeCode, userInfo.ProjectCode, queryParams) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } + return c.JSON(http.StatusOK, uncheckedDataSheets) +} - return c.JSON(http.StatusOK, `{"result":"success"}`) +func (sd *PallidSturgeonHandler) GetDownloadInfo(c echo.Context) error { + downloadInfo, err := sd.Store.GetDownloadInfo() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return 
c.JSON(http.StatusOK, downloadInfo) } -func (sd *PallidSturgeonHandler) TelemetryFishUpload(c echo.Context) error { - var err error - uploadTelemetryFishes := []models.UploadTelemetryFish{} - if err := c.Bind(&uploadTelemetryFishes); err != nil { +func (sd *PallidSturgeonHandler) UploadDownloadZip(c echo.Context) error { + form, err := c.MultipartForm() + if err != nil { return err } - for _, uploadTelemetryFish := range uploadTelemetryFishes { - uploadTelemetryFish.LastUpdated = time.Now() - uploadTelemetryFish.UploadedBy = "DeeLiang" - err = sd.Store.SaveTelemetryFishUpload(uploadTelemetryFish) - if err != nil { - return err - } + files := form.File["file"] + + downloadInfo, err := sd.Store.UploadDownloadZip(files[0]) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, downloadInfo) +} - // err = sd.Store.UploadSearchDatasheetCheck(uploadTelemetryFish.UploadedBy, uploadTelemetryFish.UploadSessionId) - // if err != nil { - // return err - // } +func (sd *PallidSturgeonHandler) GetDownloadZip(c echo.Context) error { - // err = sd.Store.UploadSearchDatasheet(uploadTelemetryFish.UploadedBy) - // if err != nil { - // return err - // } + downloadZipName, err := sd.Store.GetDownloadZip() + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } + defer os.Remove(downloadZipName) + return c.Inline(downloadZipName, downloadZipName) +} - return c.JSON(http.StatusOK, `{"result":"success"}`) +func (sd *PallidSturgeonHandler) GetUploadSessionLogs(c echo.Context) error { + uploadSessionId := c.QueryParam("uploadSessionId") + + user := c.Get("PSUSER").(models.User) + userInfo, err := sd.Store.GetUser(user.Email) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + bends, err := sd.Store.GetUploadSessionLogs(userInfo.FirstName+" "+userInfo.LastName, uploadSessionId) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return 
c.JSON(http.StatusOK, bends) +} + +func (sd *PallidSturgeonHandler) GetSitesExport(c echo.Context) error { + id, year, segmentCode, seasonCode, bendrn := c.QueryParam("id"), c.QueryParam("year"), c.QueryParam("segmentCode"), c.QueryParam("seasonCode"), c.QueryParam("bendrn") + + userInfo, err := sd.Store.GetUserRoleById(id) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + + exportData, err := sd.Store.GetSitesExport(year, userInfo.OfficeCode, userInfo.ProjectCode, segmentCode, seasonCode, bendrn) + if err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) + } + return c.JSON(http.StatusOK, exportData) } diff --git a/handlers/userhandler.go b/handlers/userhandler.go index 9607ac8..e3f1423 100644 --- a/handlers/userhandler.go +++ b/handlers/userhandler.go @@ -1,376 +1,88 @@ package handlers import ( - "bytes" - "encoding/json" - "io/ioutil" - "log" "net/http" - "net/url" - "strings" - "di2e.net/cwbi/pallid_sturgeon_api/server/config" - "di2e.net/cwbi/pallid_sturgeon_api/server/models" + "github.com/USACE/pallid_sturgeon_api/server/models" + "github.com/USACE/pallid_sturgeon_api/server/stores" "github.com/labstack/echo/v4" ) type UserHandler struct { - Config *config.AppConfig + Store *stores.AuthStore } -var response models.KeyCloakResponse - -func (h *UserHandler) GetAdminToken() error { +func (u *UserHandler) AddUserRoleOffice(c echo.Context) error { var err error - var decodedResponse *models.KeyCloakResponse - - hc := http.Client{} - form := url.Values{} - - // Build form to POST - form.Add("client_id", "admin-cli") - form.Add("username", h.Config.AdminUsername) - form.Add("password", h.Config.AdminPassword) - form.Add("grant_type", "password") - - // Here is where the code stops working, returns an empty body - tokenUrl := h.Config.KeycloakUrl + "/realms/master/protocol/openid-connect/token" - req, err := http.NewRequest("POST", tokenUrl, strings.NewReader(form.Encode())) - req.PostForm = form - 
req.Header.Add("Content-Type", "application/x-www-form-urlencoded") - if err != nil { - return err - } - - resp, err := hc.Do(req) - if err != nil { - return err + userRoleOffice := models.UserRoleOffice{} + if err := c.Bind(&userRoleOffice); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - - err = json.NewDecoder(resp.Body).Decode(&decodedResponse) - + err = u.Store.AddUserRoleOffice(userRoleOffice) if err != nil { - return err - } else { - // We only need the Access Token - response.AccessToken = decodedResponse.AccessToken - response.Expires = decodedResponse.Expires + return c.JSON(http.StatusInternalServerError, err.Error()) } - return err + return c.JSON(http.StatusOK, `{"result":"success"}`) } -func (h *UserHandler) GetClientId() error { - - url := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/clients?clientId=" + h.Config.ClientName - - // Create a new request using http - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return err - } - - // add authorization header to the req - req.Header.Add("Authorization", "Bearer "+response.AccessToken) +func (u *UserHandler) GetUserRoleOffices(c echo.Context) error { + email := c.Param("email") - // Send req using http Client - client := &http.Client{} - resp, err := client.Do(req) + roleOfficeItems, err := u.Store.GetUserRoleOffices(email) if err != nil { - log.Println("Error on response.\n[ERROR] -", err) + return c.JSON(http.StatusInternalServerError, err.Error()) } - defer resp.Body.Close() - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - log.Println("Error while reading the response bytes:", err) - } - - var bodyResponse []models.KeyCloakResponse - json.Unmarshal([]byte(body), &bodyResponse) - - if len(bodyResponse) > 0 { - response.ClientId = bodyResponse[0].ClientId - } - - return err + return c.JSON(http.StatusOK, roleOfficeItems) } -func (h *UserHandler) GetRoleId(roleType string) (string, error) { - - url := h.Config.KeycloakUrl + 
"/admin/realms/" + h.Config.Realm + "/clients/" + response.ClientId + "/roles/" + roleType - - // Create a new request using http - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return "", err - } - - // add authorization header to the req - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - // Send req using http Client - client := &http.Client{} - resp, err := client.Do(req) - if err != nil { - log.Println("Error on response.\n[ERROR] -", err) - } - defer resp.Body.Close() +func (u *UserHandler) GetUserRoleOfficeById(c echo.Context) error { + id := c.Param("id") - body, err := ioutil.ReadAll(resp.Body) + roleOffice, err := u.Store.GetUserRoleOfficeById(id) if err != nil { - log.Println("Error while reading the response bytes:", err) - } - - var role models.KeyCloakRole - json.Unmarshal([]byte(body), &role) - - if role.RoleId != "" { - return role.RoleId, err + return c.JSON(http.StatusInternalServerError, err.Error()) } - return "", err + return c.JSON(http.StatusOK, roleOffice) } -func (h *UserHandler) GetUsersByRoleType(c echo.Context) error { - roleType := c.Param("roleType") - var err error - - err = h.GetAdminToken() - if err != nil { - return err - } - - if response.ClientId == "" { - err = h.GetClientId() - if err != nil { - return err - } - } - - url := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/clients/" + response.ClientId + "/roles/" + roleType + "/users" - - // Create a new request using http - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return err - } - - // add authorization header to the req - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - // Send req using http Client - client := &http.Client{} - resp, err := client.Do(req) - if err != nil { - log.Println("Error on response.\n[ERROR] -", err) - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) +func (u *UserHandler) GetUsers(c echo.Context) error { + users, err := u.Store.GetUsers() 
if err != nil { - log.Println("Error while reading the response bytes:", err) + return c.JSON(http.StatusInternalServerError, err.Error()) } - var users []models.KeyCloakUser - json.Unmarshal([]byte(body), &users) - return c.JSON(http.StatusOK, users) } -func (h *UserHandler) GetUserByUsername(userName string) ([]models.KeyCloakUser, error) { - var users []models.KeyCloakUser - - url := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/users?username=" + userName - - // Create a new request using http - req, err := http.NewRequest("GET", url, nil) - if err != nil { - return users, err - } - - // add authorization header to the req - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - // Send req using http Client - client := &http.Client{} - resp, err := client.Do(req) - if err != nil { - log.Println("Error on response.\n[ERROR] -", err) - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) +func (u *UserHandler) GetUserAccessRequests(c echo.Context) error { + users, err := u.Store.GetUserAccessRequests() if err != nil { - log.Println("Error while reading the response bytes:", err) + return c.JSON(http.StatusInternalServerError, err.Error()) } - json.Unmarshal([]byte(body), &users) - - return users, err + return c.JSON(http.StatusOK, users) } -func (h *UserHandler) AddUserRoleRequest(c echo.Context) error { - var err error - - user := models.KeyCloakUser{} - if err := c.Bind(&user); err != nil { - return err - } - - err = h.GetAdminToken() - if err != nil { - return err - } - - if response.ClientId == "" { - err = h.GetClientId() - if err != nil { - return err - } +func (u *UserHandler) UpdateUserRoleOffice(c echo.Context) error { + userData := models.UserRoleOffice{} + if err := c.Bind(&userData); err != nil { + return c.JSON(http.StatusInternalServerError, err.Error()) } - - err = h.AddUserOrUserRole(user) + err := u.Store.UpdateUserRoleOffice(userData) if err != nil { - return err + return 
c.JSON(http.StatusInternalServerError, err.Error()) } - return c.JSON(http.StatusOK, `{"result":"success"}`) } -func (h *UserHandler) AddUserOrUserRole(user models.KeyCloakUser) error { - var err error - - roleId, err := h.GetRoleId(user.Role.RoleName) - if err != nil { - return err - } - keycloakUser, err := h.GetUserByUsername(user.Username) - if err != nil { - return err - - } - if len(keycloakUser) == 0 { - log.Println("user doesn't exist, create user") - err = h.AddUser(user) - if err != nil { - return err - } - - err = h.AddUserOrUserRole(user) - if err != nil { - return err - } - } else { - err = h.AddUserRole(keycloakUser[0], roleId, user.Role.RoleName) - if err != nil { - return err - } - } - - return err -} - -func (h *UserHandler) AddUser(user models.KeyCloakUser) error { - var err error - - hc := http.Client{} - - values := map[string]interface{}{"firstName": user.FirstName, - "lastName": user.LastName, - "username": user.Username, - "email": user.Username, - "enabled": "true", - // "credentials": []interface{}{map[string]string{"type": "password", "value": "password"}}, - "attributes": map[string]string{"cacUID": user.UserID}} - json_data, err := json.Marshal(values) - if err != nil { - return err - } - - tokenUrl := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/users/" - req, err := http.NewRequest("POST", tokenUrl, bytes.NewBuffer(json_data)) - if err != nil { - return err - } - - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - resp, err := hc.Do(req) - if err != nil { - return err - } - - _, err = ioutil.ReadAll(resp.Body) - - return err -} - -func (h *UserHandler) AddUserRole(user models.KeyCloakUser, roleId string, roleType string) error { - var err error - - hc := http.Client{} - - values := []interface{}{map[string]string{"id": roleId, "name": roleType}} - json_data, err := json.Marshal(values) - if err != nil { - return err - } - - // Here is where the code 
stops working, returns an empty body - tokenUrl := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/users/" + user.UserID + "/role-mappings/clients/" + response.ClientId - req, err := http.NewRequest("POST", tokenUrl, bytes.NewBuffer(json_data)) - if err != nil { - return err - } - - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - resp, err := hc.Do(req) - if err != nil { - return err - } - - _, err = ioutil.ReadAll(resp.Body) - - return err -} - -func (h *UserHandler) DeleteUserRole(c echo.Context) error { - userId := c.Param("userId") - roleType := c.Param("roleType") - var err error - - hc := http.Client{} - - roleId, err := h.GetRoleId(roleType) - if err != nil { - return err - } - - values := []interface{}{map[string]string{"id": roleId, "name": roleType}} - json_data, err := json.Marshal(values) - if err != nil { - return err - } - - tokenUrl := h.Config.KeycloakUrl + "/admin/realms/" + h.Config.Realm + "/users/" + userId + "/role-mappings/clients/" + response.ClientId - req, err := http.NewRequest("DELETE", tokenUrl, bytes.NewBuffer(json_data)) - if err != nil { - return err - } - - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Authorization", "Bearer "+response.AccessToken) - - resp, err := hc.Do(req) +func (u *UserHandler) GetUsers2(c echo.Context) error { + users, err := u.Store.GetUsers2() if err != nil { - return err + return c.JSON(http.StatusInternalServerError, err.Error()) } - _, err = ioutil.ReadAll(resp.Body) - - return err + return c.JSON(http.StatusOK, users) } diff --git a/models/auth.go b/models/auth.go index 713cb4f..05a289a 100644 --- a/models/auth.go +++ b/models/auth.go @@ -1,19 +1,19 @@ package models type JwtClaim struct { - Sub string - Name string - Email string - Roles []interface{} + CacUid *string + Name string + Email string + FirstName string + LastName string + Roles []interface{} } type SearchParams struct { - Page int 
`json:"page"` - PageSize int `json:"size"` - OrderBy string `json:"orderBy"` - Filter string `json:"filter"` - PhaseType string `json:"phaseType"` - PhaseStatus string `json:"phaseStatus"` + Page int `json:"page"` + PageSize int `json:"size"` + OrderBy string `json:"orderBy"` + Filter string `json:"filter"` } // type User struct { @@ -26,10 +26,28 @@ type SearchParams struct { // } type User struct { - UserID string `db:"user_id" json:"userId"` - UserName string `db:"user_name" json:"userName"` - Email string `db:"email" json:"email"` - Deleted bool `db:"deleted" json:"-"` + ID int `db:"id" json:"id"` + UserID int `db:"user_id" json:"userId"` + UserName string `db:"user_name" json:"userName"` + FirstName string `db:"first_name" json:"firstName"` + LastName string `db:"last_name" json:"lastName"` + Email string `db:"email" json:"email"` + CacUid *string `db:"edipi" json:"cacUid"` + RoleID int `db:"role_id" json:"roleId"` + OfficeID int `db:"office_id" json:"officeId"` + Role string `db:"description" json:"role"` + OfficeCode string `db:"code" json:"officeCode"` + ProjectCode string `db:"project_code" json:"projectCode"` +} + +type UserRoleOffice struct { + ID int `db:"id" json:"id"` + UserID int `db:"user_id" json:"userId"` + RoleID int `db:"role_id" json:"roleId"` + OfficeID int `db:"office_id" json:"officeId"` + Role string `db:"description" json:"role"` + OfficeCode string `db:"code" json:"officeCode"` + ProjectCode string `db:"project_code" json:"projectCode"` } type KeyCloakResponse struct { @@ -52,7 +70,9 @@ type KeyCloakRole struct { } type Response struct { - Message string `json:"message"` + Message string `json:"message"` + Status string `json:"status"` + Data []string `json:"data"` } type Jwks struct { diff --git a/models/pallidsturgeon.go b/models/pallidsturgeon.go index 14af85f..ba49cf1 100644 --- a/models/pallidsturgeon.go +++ b/models/pallidsturgeon.go @@ -1,132 +1,567 @@ package models -import "time" +import ( + "time" +) + +type Role struct { + ID 
int `db:"id" json:"id"` + Description string `db:"description" json:"description"` +} type Season struct { - ID int `db:"id" json:"id"` - Code string `db:"code" json:"code"` - Description string `db:"description" json:"description"` - FieldAppFlag string `db:"field_app_flag" json:"fieldAppFlag"` - ProjectCode *int `db:"project_code" json:"projectCode"` + ID int `db:"s_id" json:"id"` + Code string `db:"season_code" json:"code"` + Description string `db:"season_description" json:"description"` + FieldAppFlag string `db:"field_app" json:"fieldAppFlag"` + ProjectCode *int `db:"PROJECT_CODE" json:"projectCode"` +} + +type FieldOffice struct { + ID int `db:"FO_ID" json:"id"` + Code string `db:"FIELD_OFFICE_CODE" json:"code"` + Description string `db:"FIELD_OFFICE_DESCRIPTION" json:"description"` + State string `db:"state" json:"state"` +} + +type SampleMethod struct { + Code string `db:"SAMPLE_TYPE_CODE" json:"code"` + Description string `db:"SAMPLE_TYPE_DESCRIPTION" json:"description"` +} + +type SampleUnitType struct { + Code string `db:"SAMPLE_UNIT_TYPE_CODE" json:"code"` + Description string `db:"SAMPLE_UNIT_TYPE_DESCRIPTION" json:"description"` +} + +type Segment struct { + Code int `db:"code" json:"code"` + Description *string `db:"description" json:"description"` +} + +type SampleUnit struct { + SampleUnit int `db:"sample_unit" json:"sampleUnit"` + Description *string `db:"description" json:"description"` +} + +type Bend struct { + ID int `db:"BRM_ID" json:"id"` + BendNumber int `db:"BEND_NUM" json:"bendNumber"` + Description *string `db:"B_DESC" json:"description"` + SegmentCode int `db:"B_SEGMENT" json:"segmentCode"` + UpperRiverMile *string `db:"upper_river_mile" json:"upperRiverMile"` + LowerRiverMile *string `db:"lower_river_mile" json:"lowerRiverMile"` + State string `db:"state" json:"state"` +} + +type Bend2 struct { + ID int `db:"id" json:"id"` + Code int `db:"code" json:"code"` + Description *string `db:"description" json:"description"` + SegmentId int 
`db:"segment_id" json:"segmentId"` + UpperRiverMile *string `db:"upper_river_mile" json:"upperRiverMile"` + LowerRiverMile *string `db:"lower_river_mile" json:"lowerRiverMile"` +} + +type BendRn struct { + ID int `db:"bs_id" json:"id"` + Code string `db:"bend_selection_code" json:"code"` + Description string `db:"bend_selection_description" json:"description"` +} + +type Project struct { + Code int `db:"project_code" json:"code"` + Description string `db:"project_description" json:"description"` +} + +type Meso struct { + Code string `db:"mesohabitat_code" json:"code"` +} + +type StructureFlow struct { + ID int `db:"structure_flow_code" json:"id"` + Code string `db:"structure_flow" json:"code"` +} + +type StructureMod struct { + Code string `db:"structure_mod_code" json:"code"` + Description string `db:"structure_mod" json:"description"` +} + +type Species struct { + Code string `db:"alpha_code" json:"code"` +} + +type FtPrefix struct { + Code string `db:"tag_prefix_code" json:"code"` +} + +type Mr struct { + Code string `db:"mark_recapture_code" json:"code"` + Description string `db:"mark_recapture_description" json:"description"` +} + +type Otolith struct { + Code string `db:"code" json:"code"` + Description string `db:"description" json:"description"` +} + +type SetSite1 struct { + Code string `db:"code" json:"code"` + Description string `db:"description" json:"description"` +} + +type SetSite2 struct { + Code string `db:"code" json:"code"` + Description string `db:"description" json:"description"` +} + +type Year struct { + Year string `db:"year" json:"year"` +} + +type FishSummaryWithCount struct { + Items []FishSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type FishSummary struct { + UniqueID int `db:"mr_id" json:"uniqueID"` + FishID int `db:"f_id" json:"fishId"` + Year int `db:"year" json:"year"` + FieldOffice string `db:"FIELD_OFFICE_CODE" json:"fieldOffice"` + Project int `db:"PROJECT_CODE" json:"project"` + Segment int 
`db:"SEGMENT_CODE" json:"segment"` + Season string `db:"SEASON_CODE" json:"season"` + Bend *int `db:"BEND_NUMBER" json:"bend"` + Bendrn string `db:"BEND_R_OR_N" json:"bendrn"` + BendRiverMile *float64 `db:"bend_river_mile" json:"bendRiverMile"` + Panelhook string `db:"panelhook" json:"panelhook"` + Species string `db:"SPECIES_CODE" json:"species"` + HatcheryOrigin string `db:"HATCHERY_ORIGIN_CODE" json:"hatcheryOrigin"` + CheckedBy string `db:"checkby" json:"checkedby"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` +} + +type SuppSummaryWithCount struct { + Items []SuppSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type SuppSummary struct { + FishCode string `db:"fish_code" json:"fishCode"` + UniqueID int `db:"mr_id" json:"uniqueID"` + FishID int `db:"f_id" json:"fishId"` + Year int `db:"year" json:"year"` + SuppID int `db:"sid_display" json:"suppId"` + FieldOffice string `db:"FIELD_OFFICE_CODE" json:"fieldOffice"` + Project int `db:"PROJECT_CODE" json:"project"` + Segment int `db:"SEGMENT_CODE" json:"segment"` + Season string `db:"SEASON_CODE" json:"season"` + Bend *int `db:"BEND_NUMBER" json:"bend"` + Bendrn *string `db:"BEND_R_OR_N" json:"bendrn"` + BendRiverMile *float64 `db:"bend_river_mile" json:"bendRiverMile"` + HatcheryOrigin *string `db:"HATCHERY_ORIGIN_CODE" json:"hatcheryOrigin"` + TagNumber *string `db:"tag_number" json:"tagNumber"` + CheckedBy string `db:"checkby" json:"checkedby"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` +} + +type MissouriSummaryWithCount struct { + Items []MissouriSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type MissouriSummary struct { + UniqueID int `db:"mr_id" json:"uniqueID"` + Year int `db:"year" json:"year"` + FieldOffice string `db:"FIELD_OFFICE_CODE" json:"fieldOffice"` + Project int `db:"PROJECT_CODE" 
json:"project"` + Segment int `db:"SEGMENT_CODE" json:"segment"` + Season string `db:"SEASON_CODE" json:"season"` + Bend *int `db:"BEND_NUMBER" json:"bend"` + Bendrn string `db:"BEND_R_OR_N" json:"bendrn"` + BendRiverMile *float64 `db:"bend_river_mile" json:"bendRiverMile"` + Subsample *int `db:"subsample" json:"subsample"` + Pass *int `db:"subsample_pass" json:"pass"` + SetDate *string `db:"set_date" json:"setDate"` + SetDateTime time.Time `db:"set_date" json:"setDateTime"` + Conductivity *string `db:"conductivity" json:"conductivity"` + CheckedBy string `db:"checkby" json:"checkedby"` + Approved *int `db:"approved" json:"approved"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` +} + +type GeneticSummaryWithCount struct { + Items []GeneticSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type GeneticSummary struct { + Year int `db:"year" json:"year"` + FieldOffice string `db:"FIELD_OFFICE_CODE" json:"fieldOffice"` + Project int `db:"PROJECT_CODE" json:"project"` + SturgeonType string `db:"sturgeon_type" json:"sturgeonType"` + GeneticsVialNumber string `db:"genetics_vial_number" json:"GeneticsVialNumber"` + PitTag string `db:"pit_tag" json:"pitTag"` + River string `db:"river" json:"river"` + RiverMile *float64 `db:"river_mile" json:"riverMile"` + State string `db:"state" json:"state"` + SetDate time.Time `db:"set_date" json:"setDate"` + Broodstock string `db:"broodstock_yn" json:"broodstock"` + HatchWild string `db:"hatchwild_yn" json:"hatchWild"` + SpeciesID string `db:"speciesid_yn" json:"speciesId"` + Archive string `db:"archive_yn" json:"archive"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` +} + +type SearchSummaryWithCount struct { + Items []SearchSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type SearchSummary struct { + SeID int `db:"se_id" 
json:"seId"` + SiteID int `db:"site_id" json:"site_id"` + Year int `db:"year" json:"year"` + FieldOffice string `db:"fieldoffice" json:"fieldoffice"` + Project int `db:"project_id" json:"projectId"` + Segment int `db:"segment_id" json:"segmentId"` + Season string `db:"season" json:"season"` + SearchDate *string `db:"search_date" json:"searchDate"` + Recorder string `db:"recorder" json:"recorder"` + SearchTypeCode string `db:"search_type_code" json:"searchTypeCode"` + StartTime string `db:"start_time" json:"startTime"` + StartLatitude float64 `db:"start_latitude" json:"startLatitude"` + StartLongitude float64 `db:"start_longitude" json:"startLongitude"` + StopTime string `db:"stop_time" json:"stopTime"` + StopLatitude float64 `db:"stop_latitude" json:"stopLatitude"` + StopLongitude float64 `db:"stop_longitude" json:"stopLongitude"` + Temp *string `db:"temp" json:"temp"` + Conductivity *string `db:"conductivity" json:"conductivity"` + Checkby string `db:"checkby" json:"checkby"` + Bend *int `db:"bend" json:"bend"` + Bendrn string `db:"bendrn" json:"bendrn"` + BendRiverMile *float64 `db:"bend_river_mile" json:"bendRiverMile"` + SearchDay *int `db:"search_day" json:"searchDay"` +} + +type SummaryWithCount struct { + Items []map[string]string `json:"items"` + TotalCount int `json:"totalCount"` +} + +type ProcedureSummaryWithCount struct { + Items []ProcedureSummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type ProcedureSummary struct { + ID int `db:"pid_display" json:"id"` + UniqueID int `db:"mr_id" json:"uniqueId"` + Year *int `db:"year" json:"year"` + FieldOffice *string `db:"field_office_code" json:"fieldOffice"` + Project *int `db:"project_code" json:"project"` + Segment *int `db:"segment_code" json:"segment"` + Season *string `db:"season_code" json:"season"` + PurposeCode string `db:"purpose_code" json:"purposeCode"` + // ProcedureDate time.Time `db:"procedure_date" json:"procedureDate"` + NewRadioTagNum *int `db:"new_radio_tag_num" 
json:"newRadioTagNum"` + NewFrequencyId *int `db:"new_frequency_id" json:"newFrequencyId"` + SpawnCode *string `db:"spawn_code" json:"spawnCode"` + ExpectedSpawnYear *int `db:"expected_spawn_year" json:"expectedSpawnYear"` + Bend int `db:"bend_number" json:"bend"` + Bendrn string `db:"bend_r_or_n" json:"bendrn"` + BendRiverMile float64 `db:"bend_river_mile" json:"bendRiverMile"` +} + +type TelemetrySummaryWithCount struct { + Items []TelemetrySummary `json:"items"` + TotalCount int `json:"totalCount"` +} + +type TelemetrySummary struct { + TId int `db:"t_id" json:"tId"` + TFid string `db:"t_fid" json:"tFid"` + SeId int `db:"se_id" json:"seId"` + SiteID int `db:"site_id" json:"site_id"` + Year *int `db:"year" json:"year"` + FieldOffice *string `db:"field_office_code" json:"fieldOffice"` + Project *int `db:"project_code" json:"project"` + Segment *int `db:"segment_code" json:"segment"` + Season *string `db:"season_code" json:"season"` + Bend *float64 `db:"bend_number" json:"bend"` + RadioTagNum int `db:"radio_tag_num" json:"radioTagNum"` + FrequencyIdCode int `db:"frequency_id" json:"frequencyIdCode"` + CaptureTime string `db:"capture_time" json:"captureTime"` + CaptureLatitude float64 `db:"capture_latitude" json:"captureLatitude"` + CaptureLongitude float64 `db:"capture_longitude" json:"captureLongitude"` + PositionConfidence *float64 `db:"position_confidence" json:"positionConfidence"` + MacroId *string `db:"macro_code" json:"macroId"` + MesoId *string `db:"meso_code" json:"mesoId"` + Depth *float64 `db:"depth" json:"depth"` + Conductivity *float64 `db:"conductivity" json:"conductivity"` + Turbidity *float64 `db:"turbidity" json:"turbidity"` + SearchDate *string `db:"search_date" json:"searchDate"` + SearchDay *int `db:"search_day" json:"searchDay"` + Temp *float64 `db:"temp" json:"temp"` + Silt *int `db:"silt" json:"silt"` + Sand *int `db:"sand" json:"sand"` + Gravel *int `db:"gravel" json:"gravel"` + Comments string `db:"comments" json:"comments"` +} + +type 
Upload struct { + EditInitials string `db:"edit_initials" json:"editInitials"` + SiteUpload UploadSiteData `json:"siteUpload"` + FishUpload UploadFishData `json:"fishUpload"` + SearchUpload UploadSearchData `json:"searchUpload"` + ProcedureUpload UploadProcedureData `json:"procedureUpload"` + SupplementalUpload UploadSupplementalData `json:"supplementalUpload"` + MoriverUpload UploadMoriverData `json:"moriverUpload"` + TelemetryUpload UploadTelemetryData `json:"telemetryUpload"` +} + +type SiteDataEntryWithCount struct { + Items []UploadSite `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadSiteData struct { + Items []UploadSite `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` } type UploadSite struct { - SiteID int `db:"site_id" json:"siteId"` - SiteFID string `db:"site_fid" json:"siteFid"` - SiteYear int `db:"site_year" json:"siteYear"` - FieldofficeID string `db:"fieldoffice_id" json:"fieldofficeId"` - FieldOffice string `db:"field_office" json:"fieldOffice"` - ProjectId int `db:"project_id" json:"projectId"` - Project string `db:"project" json:"project"` - SegmentId int `db:"segment_id" json:"segmentId"` - Segment string `db:"segment" json:"segment"` - SeasonId string `db:"season_id" json:"seasonId"` - Season string `db:"season" json:"season"` - Bend int `db:"bend" json:"bend"` - Bendrn string `db:"bendrn" json:"bendrn"` - BendRiverMile float64 `db:"bend_river_mile" json:"bendRiverMile"` - Comments string `db:"comments" json:"comments"` - LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` - UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` - UploadedBy string `db:"uploaded_by" json:"uploadedBy"` - UploadFilename string `db:"upload_filename" json:"uploadFilename"` + // BrmID int `db:"brm_id" json:"brmId"` + SiteID int `db:"site_id" json:"siteId"` + SiteFID string `db:"site_fid" json:"siteFid"` + SiteYear int `db:"site_year" json:"siteYear"` + FieldofficeID string 
`db:"fieldoffice_id" json:"fieldofficeId"` + FieldOffice string `db:"field_office" json:"fieldOffice"` + ProjectId int `db:"project_id" json:"projectId"` + Project string `db:"project" json:"project"` + SegmentId int `db:"segment_id" json:"segmentId"` + Segment string `db:"segment" json:"segment"` + SeasonId string `db:"season_id" json:"seasonId"` + Season string `db:"season" json:"season"` + SampleUnitTypeCode string `db:"sample_unit_type" json:"sampleUnitTypeCode"` + Bend int `db:"bend" json:"bend"` + Bendrn string `db:"bendrn" json:"bendrn"` + BendRiverMile float64 `db:"bend_river_mile" json:"bendRiverMile"` + Comments string `db:"comments" json:"comments"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` + UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` + UploadedBy string `db:"uploaded_by" json:"uploadedBy"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` +} + +type SitesWithCount struct { + Items []Sites `json:"items"` + TotalCount int `json:"totalCount"` +} + +type Sites struct { + SiteID int `db:"site_id" json:"siteId"` + SiteFID string `db:"site_fid" json:"siteFid"` + Year int `db:"year" json:"year"` + BrmID *int `db:"brm_id" json:"brmId"` + FieldofficeId string `db:"fieldoffice" json:"fieldoffice"` + ProjectId int `db:"project_id" json:"projectId"` + SegmentId int `db:"segment_id" json:"segmentId"` + SeasonId string `db:"season" json:"season"` + SampleUnitTypeCode string `db:"sample_unit_type" json:"sampleUnitType"` + Bend int `db:"bend" json:"bend"` + Bendrn string `db:"bendrn" json:"bendrn"` + BendRiverMile *string `db:"bend_river_mile" json:"bendRiverMile"` + Complete *int `db:"complete" json:"complete"` + Approved *int `db:"approved" json:"approved"` + BkgColor string `db:"bkg_color" json:"bkgColor"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"last_edit_comment"` 
+ LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` + UploadSessionId *int `db:"upload_session_id" json:"uploadSessionId"` + UploadedBy string `db:"uploaded_by" json:"uploadedBy"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` +} + +type FishDataEntryWithCount struct { + Items []UploadFish `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadFishData struct { + Items []UploadFish `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` } type UploadFish struct { - SiteID int `db:"site_id" json:"siteId"` - FFid string `db:"f_fid" json:"fFid"` - MrFid string `db:"mr_fid" json:"mrFid"` - Panelhook string `db:"panelhook" json:"panelhook"` - Bait string `db:"bait" json:"bait"` - Species string `db:"species" json:"species"` - Length float32 `db:"length" json:"length"` - Weight float32 `db:"weight" json:"weight"` - Fishcount int `db:"fishcount" json:"fishcount"` - FinCurl string `db:"fin_curl" json:"finCurl"` - Otolith string `db:"otolith" json:"otolith"` - Rayspine string `db:"rayspine" json:"rayspine"` - Scale string `db:"scale" json:"scale"` - Ftprefix string `db:"ftprefix" json:"ftprefix"` - Ftnum string `db:"ftnum" json:"ftnum"` - Ftmr string `db:"ftmr" json:"ftmr"` - Comments string `db:"comments" json:"comments"` - LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` - UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` - UploadedBy string `db:"uploaded_by" json:"uploadedBy"` - UploadFilename string `db:"upload_filename" json:"uploadFilename"` + Id *int `db:"id" json:"id"` + SiteID int `db:"site_id" json:"siteId"` + MrFid string `db:"mr_fid" json:"mrFid"` + Fid int `db:"f_id" json:"fid"` + Ffid string `db:"f_fid" json:"ffid"` + MrID *int `db:"mr_id" json:"mrId"` + Panelhook *string `db:"panelhook" json:"panelHook"` + Bait *string `db:"bait" json:"bait"` + Species *string `db:"species" json:"species"` + Length *float32 `db:"length" json:"length"` + Weight *float32 
`db:"weight" json:"weight"` + Fishcount *int `db:"fishcount" json:"countF"` + FinCurl *string `db:"fin_curl" json:"finCurl"` + Otolith *string `db:"otolith" json:"otolith"` + Rayspine *string `db:"rayspine" json:"raySpine"` + Scale *string `db:"scale" json:"scale"` + Ftprefix *string `db:"ftprefix" json:"ftPrefix"` + Ftnum *string `db:"ftnum" json:"floyTag"` + Ftmr *string `db:"ftmr" json:"mR"` + Comments string `db:"comments" json:"comments"` + Approved *int `db:"approved" json:"approved"` + LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` + UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` + UploadedBy string `db:"uploaded_by" json:"uploadedBy"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` + Project *int `db:"PROJECT_ID" json:"project"` + UniqueID *int `db:"uniqueidentifier" json:"uniqueID"` + Segment *int `db:"SEGMENT_ID" json:"segment"` + Fieldoffice string `db:"FIELDOFFICE" json:"fieldOffice"` + GeneticsVialNumber string `db:"genetics_vial_number" json:"geneticsVialNumber"` + Condition *float64 `db:"condition" json:"condition"` +} + +type SearchDataEntryWithCount struct { + Items []UploadSearch `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadSearchData struct { + Items []UploadSearch `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` + TotalCount int `json:"totalCount"` } type UploadSearch struct { - SiteID int `db:"site_id" json:"siteId"` + SiteId int `db:"site_id" json:"siteId"` + SeId int `db:"se_id" json:"seId"` SeFid string `db:"se_fid" json:"seFid"` - DsId string `db:"ds_id" json:"dsId"` + DsId int `db:"ds_id" json:"dsId"` SiteFid string `db:"site_fid" json:"siteFid"` - SearchDate time.Time `db:"search_date" json:"searchDate"` + SearchDate *string `db:"search_date" json:"searchDate"` + SearchDateTime time.Time 
`db:"search_date" json:"searchDateTime"` Recorder string `db:"recorder" json:"recorder"` SearchTypeCode string `db:"search_type_code" json:"searchTypeCode"` - SearchDay time.Time `db:"search_day" json:"searchDay"` + SearchDay *int `db:"search_day" json:"searchDay"` StartTime string `db:"start_time" json:"startTime"` StartLatitude float64 `db:"start_latitude" json:"startLatitude"` StartLongitude float64 `db:"start_longitude" json:"startLongitude"` StopTime string `db:"stop_time" json:"stopTime"` StopLatitude float64 `db:"stop_latitude" json:"stopLatitude"` StopLongitude float64 `db:"stop_longitude" json:"stopLongitude"` - Temp float64 `db:"temp" json:"temp"` - Conductivity float64 `db:"conductivity" json:"conductivity"` + Temp *float64 `db:"temp" json:"temp"` + Conductivity *float64 `db:"conductivity" json:"conductivity"` + TelemetryCount int `db:"telemetry_count" json:"telemetryCount"` + BkgColor string `db:"bkg_color" json:"bkgColor"` + Checkby string `db:"checkby" json:"checkby"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` UploadedBy string `db:"uploaded_by" json:"uploadedBy"` UploadFilename string `db:"upload_filename" json:"uploadFilename"` } +type ProcedureDataEntryWithCount struct { + Items []UploadProcedure `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadProcedureData struct { + Items []UploadProcedure `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` +} + type UploadProcedure struct { Id int `db:"id" json:"id"` - FFid string `db:"f_fid" json:"f_fid"` - PurposeCode string `db:"purpose_code" json:"purposeCode"` - ProcedurDate time.Time `db:"procedure_date" json:"procedurDate"` + Fid int `db:"f_id" json:"fid"` + FFid string `db:"f_fid" json:"fFid"` + MrFid string `db:"MR_FID" json:"mrFid"` 
+ Sid int `db:"s_id" json:"sid"` + SiteID int `db:"site_id" json:"siteId"` + PurposeCode string `db:"purpose_code" json:"purpose"` + ProcedureDate *string `db:"PROCEDURE_DATE" json:"procedureDate"` + ProcedureDateTime time.Time `db:"PROCEDURE_DATE" json:"procedureDateTime"` ProcedureStartTime string `db:"procedure_start_time" json:"procedureStartTime"` ProcedureEndTime string `db:"procedure_end_time" json:"procedureEndTime"` ProcedureBy string `db:"procedure_by" json:"procedureBy"` - AntibioticInjectionInd int `db:"antibiotic_injection_ind" json:"antibioticInjectionInd"` - PhotoDorsalInd int `db:"photo_dorsal_ind" json:"photoDorsalInd"` - PhotoVentralInd int `db:"photo_ventral_ind" json:"photoVentralInd"` - PhotoLeftInd int `db:"photo_left_ind" json:"photoLeftInd"` - OldRadioTagNum int `db:"old_radio_tag_num" json:"oldRadioTagNum"` - OldFrequencyId int `db:"old_frequency_id" json:"oldFrequencyId"` - DstSerialNum int `db:"dst_serial_num" json:"dstSerialNum"` - DstStartDate time.Time `db:"dst_start_date" json:"dstStartDate"` + AntibioticInjectionInd *int `db:"ANTIBIOTIC_INJECTION_IND" json:"antibioticInjection"` + PhotoDorsalInd *int `db:"PHOTO_DORSAL_IND" json:"pDorsal"` + PhotoVentralInd *int `db:"PHOTO_VENTRAL_IND" json:"pVentral"` + PhotoLeftInd *int `db:"PHOTO_LEFT_IND" json:"pLeft"` + OldRadioTagNum *int `db:"old_radio_tag_num" json:"oldRadioTagNum"` + OldFrequencyId *int `db:"OLD_FREQUENCY_ID" json:"oldFrequencyId"` + DstSerialNum *int `db:"dst_serial_num" json:"dstSerialNum"` + DstStartDate *string `db:"dst_start_date" json:"dstStartDate"` + DstStartDateTime time.Time `db:"dst_start_date" json:"dstStartDateTime"` DstStartTime string `db:"dst_start_time" json:"dstStartTime"` - DstReimplantInd int `db:"dst_reimplant_ind" json:"dstReimplantInd"` - NewRadioTagNum int `db:"new_radio_tag_num" json:"newRadioTagNum"` - NewFrequencyId int `db:"new_frequency_id" json:"newFrequencyId"` - SexCode string `db:"sex_code" json:"sexCode"` - BloodSampleInd int 
`db:"blood_sample_ind" json:"bloodSampleInd"` - EggSampleInd int `db:"egg_sample_ind" json:"eggSampleInd"` + DstReimplantInd *int `db:"DST_REIMPLANT_IND" json:"dstReimplant"` + NewRadioTagNum *int `db:"new_radio_tag_num" json:"newRadioTagNum"` + NewFrequencyId *int `db:"NEW_FREQUENCY_ID" json:"newFreqId"` + SexCode string `db:"sex_code" json:"sex"` + BloodSampleInd *int `db:"BLOOD_SAMPLE_IND" json:"bloodSample"` + EggSampleInd *int `db:"EGG_SAMPLE" json:"eggSample"` Comments string `db:"comments" json:"comments"` - FishHealthComments string `db:"fish_health_comments" json:"fishHealthComments"` - EvalLocationCode string `db:"eval_location_code" json:"evalLocationCode"` - SpawnCode string `db:"spawn_code" json:"spawnCode"` - VisualReproStatusCode string `db:"visual_repro_status_code" json:"visualReproStatusCode"` - UltrasoundReproStatusCode string `db:"ultrasound_repro_status_code" json:"ultrasoundReproStatusCode"` - ExpectedSpawnYear int `db:"expected_spawn_year" json:"expectedSpawnYear"` - UltrasoundGonadLength float64 `db:"ultrasound_gonad_length" json:"ultrasoundGonadLength"` + FishHealthComments string `db:"FISH_HEALTH_COMMENTS" json:"fishHealthComment"` + EvalLocationCode string `db:"EVAL_LOCATION_CODE" json:"evalLocation"` + SpawnStatus string `db:"SPAWN_CODE" json:"spawnStatus"` + VisualReproStatusCode string `db:"VISUAL_REPRO_STATUS" json:"visualReproStatus"` + UltrasoundReproStatusCode string `db:"ULTRASOUND_REPRO_STATUS" json:"ultrasoundReproStatus"` + ExpectedSpawnYear *int `db:"EXPECTED_SPAWN_YEAR" json:"expectedSpawnYear"` + UltrasoundGonadLength *float64 `db:"ultrasound_gonad_length" json:"ultrasoundGonadLength"` GonadCondition string `db:"gonad_condition" json:"gonadCondition"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` UploadedBy 
string `db:"uploaded_by" json:"uploadedBy"` UploadFilename string `db:"upload_filename" json:"uploadFilename"` + Checkby string `db:"checkby" json:"checkby"` +} + +type SupplementalDataEntryWithCount struct { + Items []UploadSupplemental `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadSupplementalData struct { + Items []UploadSupplemental `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` } type UploadSupplemental struct { + Id *int `db:"id" json:"id"` + Sid int `db:"s_id" json:"sid"` + Fid int `db:"f_id" json:"fid"` SiteID int `db:"site_id" json:"siteId"` FFid string `db:"f_fid" json:"fFid"` + MrId int `db:"mr_id" json:"mrId"` + NetRiverMile *float64 `db:"netrivermile" json:"netrivermile"` + Length *float32 `db:"length" json:"length"` + Weight *float32 `db:"weight" json:"weight"` + Condition *float64 `db:"condition" json:"condition"` MrFid string `db:"mr_fid" json:"mrFid"` Tagnumber string `db:"tagnumber" json:"tagnumber"` Pitrn string `db:"pitrn" json:"pitrn"` Scuteloc string `db:"scuteloc" json:"scuteloc"` - Scutenum float64 `db:"scutenum" json:"scutenum"` + Scutenum *int `db:"scutenum" json:"scutenum"` Scuteloc2 string `db:"scuteloc2" json:"scuteloc2"` - Scutenum2 float64 `db:"scutenum2" json:"scutenum2"` + Scutenum2 *int `db:"scutenum2" json:"scutenum2"` Elhv string `db:"elhv" json:"elhv"` Elcolor string `db:"elcolor" json:"elcolor"` Erhv string `db:"erhv" json:"erhv"` @@ -135,57 +570,80 @@ type UploadSupplemental struct { Dangler string `db:"dangler" json:"dangler"` Genetic string `db:"genetic" json:"genetic"` GeneticsVialNumber string `db:"genetics_vial_number" json:"geneticsVialNumber"` - Broodstock float64 `db:"broodstock" json:"broodstock"` - HatchWild float64 `db:"hatch_wild" json:"hatchWild"` - SpeciesId int `db:"species_id" json:"speciesId"` - Archive int `db:"archive" json:"archive"` - Head float64 `db:"head" json:"head"` - Snouttomouth float64 `db:"snouttomouth" json:"snouttomouth"` - Inter 
float64 `db:"inter" json:"inter"` - Mouthwidth float64 `db:"mouthwidth" json:"mouthwidth"` - MIb float64 `db:"m_ib" json:"mIb"` - LOb float64 `db:"l_ob" json:"lOb"` - LIb float64 `db:"l_ib" json:"lIb"` - RIb float64 `db:"r_ib" json:"rIb"` - ROb float64 `db:"r_ob" json:"rOb"` - Anal float64 `db:"anal" json:"anal"` - Dorsal float64 `db:"dorsal" json:"dorsal"` + Broodstock *int `db:"broodstock" json:"broodstock"` + HatchWild *int `db:"hatch_wild" json:"hatchWild"` + SpeciesId *int `db:"species_id" json:"speciesId"` + Species *string `db:"species" json:"species"` + Archive *int `db:"archive" json:"archive"` + Head *int `db:"head" json:"head"` + Snouttomouth *int `db:"snouttomouth" json:"snouttomouth"` + Inter *int `db:"inter" json:"inter"` + Mouthwidth *int `db:"mouthwidth" json:"mouthwidth"` + MIb *int `db:"m_ib" json:"mIb"` + LOb *int `db:"l_ob" json:"lOb"` + LIb *int `db:"l_ib" json:"lIb"` + RIb *int `db:"r_ib" json:"rIb"` + ROb *int `db:"r_ob" json:"rOb"` + Anal *int `db:"anal" json:"anal"` + Dorsal *int `db:"dorsal" json:"dorsal"` Status string `db:"status" json:"status"` HatcheryOrigin string `db:"hatchery_origin" json:"hatcheryOrigin"` Sex string `db:"sex" json:"sex"` Stage string `db:"stage" json:"stage"` Recapture string `db:"recapture" json:"recapture"` Photo string `db:"photo" json:"photo"` + GeneticNeeds string `db:"genetic_needs" json:"geneticNeeds"` + OtherTagInfo string `db:"other_tag_info" json:"otherTagInfo"` + Complete *int `db:"complete" json:"complete"` + Approved *int `db:"approved" json:"approved"` Comments string `db:"comments" json:"comments"` + Checkby string `db:"checkby" json:"checkby"` + Recorder string `db:"recorder" json:"recorder"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` UploadedBy string `db:"uploaded_by" 
json:"uploadedBy"` UploadFilename string `db:"upload_filename" json:"uploadFilename"` } -type UploadMr struct { +type MoriverDataEntryWithCount struct { + Items []UploadMoriver `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadMoriverData struct { + Items []UploadMoriver `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` +} + +type UploadMoriver struct { SiteID int `db:"site_id" json:"siteId"` SiteFid string `db:"site_fid" json:"siteFid"` + MrID int `db:"mr_id" json:"mrId"` MrFid string `db:"mr_fid" json:"mrFid"` + SeFieldID string `db:"se_field_id" json:"seFieldId"` Season string `db:"season" json:"season"` - Setdate time.Time `db:"setdate" json:"setdate"` - Subsample string `db:"subsample" json:"subsample"` + SetDate *string `db:"setdate" json:"setdate"` + SetDateTime time.Time `db:"setdate" json:"setDateTime"` + Subsample float64 `db:"subsample" json:"subsample"` Subsamplepass float64 `db:"subsamplepass" json:"subsamplepass"` + SubsampleROrN string `db:"subsample_r_or_n" json:"subsampleROrN"` Subsamplen string `db:"subsamplen" json:"subsamplen"` Recorder string `db:"recorder" json:"recorder"` Gear string `db:"gear" json:"gear"` GearType string `db:"gear_type" json:"gearType"` Temp float64 `db:"temp" json:"temp"` - Turbidity float64 `db:"turbidity" json:"turbidity"` - Conductivity float64 `db:"conductivity" json:"conductivity"` - Do float64 `db:"do" json:"do"` - Distance float64 `db:"distance" json:"distance"` - Width float64 `db:"width" json:"width"` - Netrivermile float64 `db:"netrivermile" json:"netrivermile"` + Turbidity *float64 `db:"turbidity" json:"turbidity"` + Conductivity *float64 `db:"conductivity" json:"conductivity"` + Do *float64 `db:"do" json:"dissolvedOxygen"` + Distance *float64 `db:"distance" json:"distance"` + Width *float64 `db:"width" json:"width"` + Netrivermile *float64 `db:"netrivermile" json:"netrivermile"` Structurenumber string `db:"structurenumber" json:"structurenumber"` Usgs string 
`db:"usgs" json:"usgs"` - Riverstage float64 `db:"riverstage" json:"riverstage"` - Discharge float64 `db:"discharge" json:"discharge"` + Riverstage *float64 `db:"riverstage" json:"riverstage"` + Discharge *float64 `db:"discharge" json:"discharge"` U1 string `db:"u1" json:"u1"` U2 string `db:"u2" json:"u2"` U3 string `db:"u3" json:"u3"` @@ -200,66 +658,244 @@ type UploadMr struct { MicroStructure string `db:"micro_structure" json:"microStructure"` StructureFlow string `db:"structure_flow" json:"structureFlow"` StructureMod string `db:"structure_mod" json:"structureMod"` - SetSite1 string `db:"set_site_1" json:"setSite_1"` - SetSite2 string `db:"set_site_2" json:"setSite_2"` - SetSite3 string `db:"set_site_3" json:"setSite_3"` + SetSite1 string `db:"set_site_1" json:"setSite1"` + SetSite2 string `db:"set_site_2" json:"setSite2"` + SetSite3 string `db:"set_site_3" json:"setSite3"` StartTime string `db:"starttime" json:"startTime"` - StartLatitude float64 `db:"startlatitude" json:"startLatitude"` - StartLongitude float64 `db:"startlongitude" json:"startLongitude"` - StopTime string `db:"stoptime" json:"stopTime"` - StopLatitude float64 `db:"stoplatitude" json:"stopLatitude"` - StopLongitude float64 `db:"stop_longitude" json:"stopLongitude"` - Depth1 float64 `db:"depth1" json:"depth1"` - Velocitybot1 float64 `db:"velocitybot1" json:"velocitybot1"` - Velocity08_1 float64 `db:"velocity08_1" json:"velocity08_1"` - Velocity02or06_1 float64 `db:"velocity02or06_1" json:"velocity02or06_1"` - Depth2 float64 `db:"depth2" json:"depth2"` - Velocitybot2 float64 `db:"velocitybot2" json:"velocitybot2"` - Velocity08_2 float64 `db:"velocity08_2" json:"velocity08_2"` - Velocity02or06_2 float64 `db:"velocity02or06_2" json:"velocity02or06_2"` - Depth3 float64 `db:"depth3" json:"depth3"` - Velocitybot3 float64 `db:"velocitybot3" json:"velocitybot3"` - Velocity08_3 float64 `db:"velocity08_3" json:"velocity08_3"` - Velocity02or06_3 float64 `db:"velocity02or06_3" json:"velocity02or06_3"` - 
Watervel float64 `db:"watervel" json:"watervel"` - Cobble float64 `db:"cobble" json:"cobble"` - Organic float64 `db:"organic" json:"organic"` - Silt float64 `db:"silt" json:"silt"` - Sand float64 `db:"sand" json:"sand"` - Gravel float64 `db:"gravel" json:"gravel"` + StartLatitude float64 `db:"startlatitude" json:"startlatitude"` + StartLongitude float64 `db:"startlongitude" json:"startlongitude"` + StopTime string `db:"stoptime" json:"stoptime"` + StopLatitude *float64 `db:"stoplatitude" json:"stoplatitude"` + StopLongitude *float64 `db:"stop_longitude" json:"stoplongitude"` + Depth1 *float64 `db:"depth1" json:"depth1"` + Velocitybot1 *float64 `db:"velocitybot1" json:"velocitybot1"` + Velocity08_1 *float64 `db:"velocity08_1" json:"velocity081"` + Velocity02or06_1 *float64 `db:"velocity02or06_1" json:"velocity02or061"` + Depth2 *float64 `db:"depth2" json:"depth2"` + Velocitybot2 *float64 `db:"velocitybot2" json:"velocitybot2"` + Velocity08_2 *float64 `db:"velocity08_2" json:"velocity082"` + Velocity02or06_2 *float64 `db:"velocity02or06_2" json:"velocity02or062"` + Depth3 *float64 `db:"depth3" json:"depth3"` + Velocitybot3 *float64 `db:"velocitybot3" json:"velocitybot3"` + Velocity08_3 *float64 `db:"velocity08_3" json:"velocity083"` + Velocity02or06_3 *float64 `db:"velocity02or06_3" json:"velocity02or063"` + Watervel *float64 `db:"watervel" json:"watervel"` + Cobble *float64 `db:"cobble" json:"cobble"` + Organic *float64 `db:"organic" json:"organic"` + Silt *float64 `db:"silt" json:"silt"` + Sand *float64 `db:"sand" json:"sand"` + Gravel *float64 `db:"gravel" json:"gravel"` Comments string `db:"comments" json:"comments"` - Complete float64 `db:"complete" json:"complete"` + Complete *float64 `db:"complete" json:"complete"` Checkby string `db:"checkby" json:"checkby"` NoTurbidity string `db:"no_turbidity" json:"noTurbidity"` NoVelocity string `db:"no_velocity" json:"noVelocity"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string 
`db:"last_edit_comment" json:"lastEditComment"` + Project *int `db:"PROJECT_ID" json:"project"` + FieldOffice string `db:"FIELDOFFICE" json:"fieldOffice"` + Segment *int `db:"SEGMENT_ID" json:"segment"` + BkgColor string `db:"bkg_color" json:"bkgColor"` + SuppBkgColor string `db:"supp_bkg_color" json:"suppBkgColor"` + ProcBkgColor string `db:"proc_bkg_color" json:"procBkgColor"` + FishCount int `db:"fish_count" json:"fishCount"` + SuppCount int `db:"supp_count" json:"suppCount"` + ProcCount int `db:"proc_count" json:"procCount"` + Bend int `db:"bend" json:"bend"` + BendRn string `db:"bendrn" json:"bendrn"` + BendRiverMile float64 `db:"bendrivermile" json:"bendrivermile"` LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` UploadedBy string `db:"uploaded_by" json:"uploadedBy"` UploadFilename string `db:"upload_filename" json:"uploadFilename"` } -type UploadTelemetryFish struct { +type TelemetryDataEntryWithCount struct { + Items []UploadTelemetry `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UploadTelemetryData struct { + Items []UploadTelemetry `json:"items"` + UploadFilename string `db:"upload_filename" json:"uploadFilename"` +} + +type UploadTelemetry struct { + TId int `db:"t_id" json:"tId"` TFid string `db:"t_fid" json:"tFid"` - SeFid string `db:"se_fid" json:"seFid"` - Bend float64 `db:"bend" json:"bend"` + SeFid string `db:"se_fid" json:"seFieldId"` + SeId int `db:"se_id" json:"seId"` + SiteId int `db:"site_id" json:"siteId"` + Bend *float64 `db:"bend" json:"bend"` RadioTagNum int `db:"radio_tag_num" json:"radioTagNum"` FrequencyIdCode int `db:"frequency_id_code" json:"frequencyIdCode"` - CaptureTime string `db:"capture_time" json:"captureTime"` + CaptureTime string `db:"capture_time" json:"captureDate"` CaptureLatitude float64 `db:"capture_latitude" json:"captureLatitude"` - CaptureLongitude float64 `db:"capture_longitude" json:"capture_Longitude"` - 
PositionConfidence float64 `db:"position_confidence" json:"positionConfidence"` + CaptureLongitude float64 `db:"capture_longitude" json:"captureLongitude"` + PositionConfidence *float64 `db:"position_confidence" json:"positionConfidence"` MacroId string `db:"macro_id" json:"macroId"` MesoId string `db:"meso_id" json:"mesoId"` - Depth float64 `db:"depth" json:"depth"` - Temp float64 `db:"temp" json:"temp"` - Conductivity float64 `db:"conductivity" json:"conductivity"` - Turbidity float64 `db:"turbidity" json:"turbidity"` - Silt float64 `db:"silt" json:"silt"` - Sand float64 `db:"sand" json:"sand"` - Gravel float64 `db:"gravel" json:"gravel"` + Depth *float64 `db:"depth" json:"depth"` + Temp *float64 `db:"temp" json:"temp"` + Conductivity *float64 `db:"conductivity" json:"conductivity"` + Turbidity *float64 `db:"turbidity" json:"turbidity"` + Silt *float64 `db:"silt" json:"silt"` + Sand *float64 `db:"sand" json:"sand"` + Gravel *float64 `db:"gravel" json:"gravel"` + Checkby string `db:"checkby" json:"checkby"` + EditInitials string `db:"edit_initials" json:"editInitials"` + LastEditComment string `db:"last_edit_comment" json:"lastEditComment"` Comments string `db:"comments" json:"comments"` LastUpdated time.Time `db:"last_updated" json:"lastUpdated"` UploadSessionId int `db:"upload_session_id" json:"uploadSessionId"` UploadedBy string `db:"uploaded_by" json:"uploadedBy"` UploadFilename string `db:"upload_filename" json:"uploadFilename"` } + +type ProcedureOut struct { + UploadSessionId int `json:"uploadSessionId"` + UploadedBy string `json:"uploadedBy"` + SiteCntFinal int `json:"siteCntFinal"` + MrCntFinal int `json:"mrCntFinal"` + FishCntFinal int `json:"fishCntFinal"` + SearchCntFinal int `json:"searchCntFinal"` + SuppCntFinal int `json:"suppCntFinal"` + TelemetryCntFinal int `json:"telemetryCntFinal"` + ProcedureCntFinal int `json:"procedureCntFinal"` + NoSiteCnt int `json:"noSiteCnt"` + SiteMatch int `json:"siteMatch"` + NoSiteIDMsg string `json:"noSiteIDMsg"` +} 
+ +type UsgNoVialNumber struct { + Fp string `db:"fp" json:"fp"` + SpeciesCode string `db:"SPECIES" json:"speciesCode"` + FID int `db:"f_id" json:"fId"` + MrID int `db:"mr_id" json:"mrID"` + MrsiteID int `db:"mrsite_id" json:"mrsiteId"` + SSiteID int `db:"s_site_id" json:"sSiteID"` + FFID string `db:"f_fid" json:"fFId"` + GeneticsVialNumber string `db:"genetics_vial_number" json:"GeneticsVialNumber"` +} + +type UnapprovedDataWithCount struct { + Items []UnapprovedData `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UnapprovedData struct { + Ch string `db:"ch" json:"ch"` + Fp string `db:"fp" json:"fp"` + SegmentDescription string `db:"segment_description" json:"segmentDescription"` + Bend int `db:"bend" json:"bend"` + MrId int `db:"mr_id" json:"mrId"` + // SetDate time.Time `db:"setdate" json:"setdate"` + Subsample int `db:"subsampple" json:"subsample"` + Recorder string `db:"recorder" json:"recorder"` + Checkby string `db:"checkby" json:"checkby"` + NetRiverMile float64 `db:"netrivermile" json:"netrivermile"` + SiteId int `db:"site_id" json:"siteId"` + ProjectId int `db:"project_id" json:"projectId"` + SegmentId int `db:"segment_id" json:"segmentId"` + Season string `db:"season" json:"season"` + FieldOffice string `db:"fieldoffice" json:"fieldoffice"` + SampleUnitType string `db:"sample_unit_type" json:"sampleUnitType"` + Gear string `db:"gear" json:"gear"` +} + +type BafiDataWithCount struct { + Items []BafiData `json:"items"` + TotalCount int `json:"totalCount"` +} + +type BafiData struct { + Psb string `db:"psb" json:"psb"` + SiteId int `db:"site_id" json:"siteId"` + FieldOffice string `db:"fieldoffice" json:"fieldoffice"` + FId int `db:"f_id" json:"fId"` + MrId int `db:"mr_id" json:"mrId"` + MrFid string `db:"mr_fid" json:"mrFid"` + Species string `db:"species" json:"species"` + Recorder string `db:"recorder" json:"recorder"` + Subsample int `db:"subsample" json:"subsample"` + Gear string `db:"gear" json:"gear"` + FishCount int `db:"fishcount" 
json:"fishcount"` + Year int `db:"year" json:"year"` + SegmentId int `db:"segment_id" json:"segmentId"` + Bend int `db:"bend" json:"bend"` + Bendrn string `db:"bendrn" json:"bendrn"` + BendRiverMile float64 `db:"bendrivermile" json:"bendrivermile"` + PanelHook string `db:"panelhook" json:"panelhook"` +} + +type UncheckedDataWithCount struct { + Items []UncheckedData `json:"items"` + TotalCount int `json:"totalCount"` +} + +type UncheckedData struct { + Cb string `db:"cb" json:"cb"` + Psb string `db:"psb" json:"psb"` + MrID int `db:"mr_id" json:"mrID"` + Subsample int `db:"subsample" json:"subsample"` + Recorder string `db:"recorder" json:"recorder"` + Checkby string `db:"checkby" json:"checkby"` + Netrivermile float64 `db:"netrivermile" json:"netrivermile"` + SiteID int `db:"site_id" json:"siteId"` + ProjectID int `db:"project_id" json:"projectId"` + SegmentID int `db:"segment_id" json:"segmentId"` + Season string `db:"season" json:"season"` + FieldOffice string `db:"fieldoffice" json:"fieldoffice"` + Gear string `db:"gear" json:"gear"` +} + +type ErrorCount struct { + Year int `db:"year" json:"year"` + Count int `db:"count(el.el_id)" json:"count"` +} + +type OfficeErrorLog struct { + ElID int `db:"el_id" json:"elId"` + SiteID int `db:"site_id" json:"siteId"` + Year int `db:"year" json:"year"` + ErrorEntryDate time.Time `db:"error_entry_date" json:"errorEntryDate"` + WorksheetID int `db:"worksheet_id" json:"worksheetId"` + WorksheetTypeID int `db:"worksheet_type_id" json:"worksheetTypeId"` + FieldID int `db:"field_id" json:"fieldId"` + FormID int `db:"form_id" json:"formId"` + ErrorDescription string `db:"error_description" json:"errorDescription"` + ErrorStatus int `db:"error_fixed" json:"errorFixed"` + ErrorUpdateDate time.Time `db:"error_fixed_date" json:"errorFixedDate"` +} + +type DownloadInfo struct { + Name string `db:"name" json:"name"` + DisplayName string `db:"display_name" json:"displayName"` + LastUpdated string `db:"last_updated" json:"lastUpdated"` +} 
+ +type UploadSessionLog struct { + DebugText string `db:"debug_text" json:"debugText"` + DateCreated string `db:"date_created" json:"dateCreated"` + PUser string `db:"p_user" json:"pUser"` + UploadSessionId string `db:"upload_session_id" json:"uploadSessionId"` +} + +type ExportSite struct { + SiteID int `db:"site_id" json:"SITE_ID"` + SiteYear int `db:"year" json:"SITE_YEAR"` + SiteFID *string `db:"site_fid" json:"SITE_FID"` + FieldOfficeID string `db:"fieldoffice" json:"FIELDOFFICE_ID"` + FieldOffice string `db:"field_office_description" json:"FIELD_OFFICE"` + ProjectId int `db:"project_id" json:"PROJECT_ID"` + Project string `db:"project_description" json:"PROJECT"` + SegmentId int `db:"segment_id" json:"SEGMENT_ID"` + Segment string `db:"segment_description" json:"SEGMENT"` + SeasonId string `db:"season" json:"SEASON_ID"` + Season string `db:"season_description" json:"SEASON"` + SampleUnitType string `db:"sample_unit_type" json:"SAMPLE UNIT TYPE"` + Bend int `db:"bend" json:"BEND"` + Bendrn string `db:"bendrn" json:"BENDRN"` + BendRiverMile *float64 `db:"bend_river_mile" json:"BENDRIVERMILE"` + SampleUnitDesc string `db:"sample_unit_desc" json:"SAMPLE_UNIT_DESC"` +} diff --git a/server.go b/server.go index 3008e20..f6e5d1a 100644 --- a/server.go +++ b/server.go @@ -1,16 +1,13 @@ package main import ( - //"log" + . "github.com/USACE/pallid_sturgeon_api/server/auth" - //. 
"di2e.net/cwbi/pallid_sturgeon_api/server/auth" "log" - "di2e.net/cwbi/pallid_sturgeon_api/server/config" - "di2e.net/cwbi/pallid_sturgeon_api/server/handlers" - "di2e.net/cwbi/pallid_sturgeon_api/server/stores" - - //"di2e.net/cwbi/pallid_sturgeon_api/server/stores" + "github.com/USACE/pallid_sturgeon_api/server/config" + "github.com/USACE/pallid_sturgeon_api/server/handlers" + "github.com/USACE/pallid_sturgeon_api/server/stores" "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" @@ -20,24 +17,23 @@ var urlContext string = "/psapi" func main() { appconfig := config.GetEnv() - //auth := Auth{} + auth := Auth{} - //err := LoadVerificationKeys(appconfig.IPPK) - // err := auth.LoadVerificationKey(appconfig.IPPK) - // if err != nil { - // log.Fatalf("Unable to load a verification key:%s.\nShutting down.", err) - // } + err := auth.LoadVerificationKey(appconfig.IPPK) + if err != nil { + log.Fatalf("Unable to load a verification key:%s.\nShutting down.", err) + } pallidSturgeonStore, err := stores.InitStores(appconfig) if err != nil { log.Printf("Unable to connect to the Main Pallid Sturgeon database: %s", err) } - // authStore, err := stores.InitAuthStore(appconfig) - // if err != nil { - // log.Printf("Unable to connect to the Auth database: %s", err) - // } + authStore, err := stores.InitAuthStore(appconfig) + if err != nil { + log.Printf("Unable to connect to the Auth database: %s", err) + } - //auth.Store = authStore + auth.Store = authStore e := echo.New() e.Use(middleware.Logger()) @@ -47,18 +43,103 @@ func main() { Store: pallidSturgeonStore, } - // userH := handlers.UserHandler{ - // Config: appconfig, - // } + userH := handlers.UserHandler{ + Store: authStore, + } e.GET(urlContext+"/version", PallidSturgeonH.Version) - e.GET(urlContext+"/seasons", PallidSturgeonH.GetSeasons) - e.POST(urlContext+"/siteUpload", PallidSturgeonH.SiteUpload) - e.POST(urlContext+"/fishUpload", PallidSturgeonH.FishUpload) - e.POST(urlContext+"/searchUpload", 
PallidSturgeonH.SearchUpload)
-	e.POST(urlContext+"/supplementalUpload", PallidSturgeonH.SupplementalUpload)
-	e.POST(urlContext+"/procedureUpload", PallidSturgeonH.ProcedureUpload)
-	e.POST(urlContext+"/mrUpload", PallidSturgeonH.MrUpload)
-	e.POST(urlContext+"/telemetryFishUpload", PallidSturgeonH.TelemetryFishUpload)
-	e.Logger.Fatal(e.Start(":8080"))
+
+	e.GET(urlContext+"/projects", auth.Authorize(PallidSturgeonH.GetProjects, PUBLIC))
+	e.GET(urlContext+"/projectsFilter", auth.Authorize(PallidSturgeonH.GetProjectsFilter, PUBLIC))
+	e.GET(urlContext+"/roles", auth.Authorize(PallidSturgeonH.GetRoles, PUBLIC))
+	e.GET(urlContext+"/fieldOffices", auth.Authorize(PallidSturgeonH.GetFieldOffices, PUBLIC))
+	e.GET(urlContext+"/seasons", auth.Authorize(PallidSturgeonH.GetSeasons, PUBLIC))
+	e.GET(urlContext+"/segments", auth.Authorize(PallidSturgeonH.GetSegments, PUBLIC))
+	// NOTE(review): removed duplicate registration of GET /fieldOffices (identical route+handler registered above)
+	e.GET(urlContext+"/sampleUnitTypes", auth.Authorize(PallidSturgeonH.GetSampleUnitTypes, PUBLIC))
+	e.GET(urlContext+"/sampleUnit", auth.Authorize(PallidSturgeonH.GetSampleUnit, PUBLIC))
+	e.GET(urlContext+"/bendRn", auth.Authorize(PallidSturgeonH.GetBendRn, PUBLIC))
+	e.GET(urlContext+"/meso", auth.Authorize(PallidSturgeonH.GetMeso, PUBLIC))
+	e.GET(urlContext+"/structureFlow", auth.Authorize(PallidSturgeonH.GetStructureFlow, PUBLIC))
+	e.GET(urlContext+"/structureMod", auth.Authorize(PallidSturgeonH.GetStructureMod, PUBLIC))
+	e.GET(urlContext+"/species", auth.Authorize(PallidSturgeonH.GetSpecies, PUBLIC))
+	e.GET(urlContext+"/ftPrefix", auth.Authorize(PallidSturgeonH.GetFtPrefixes, PUBLIC))
+	e.GET(urlContext+"/mr", auth.Authorize(PallidSturgeonH.GetMr, PUBLIC))
+	e.GET(urlContext+"/otolith", auth.Authorize(PallidSturgeonH.GetOtolith, PUBLIC))
+	e.GET(urlContext+"/setsite1", auth.Authorize(PallidSturgeonH.GetSetSite1, PUBLIC))
+	e.GET(urlContext+"/setsite2", auth.Authorize(PallidSturgeonH.GetSetSite2, 
PUBLIC)) + e.GET(urlContext+"/years", auth.Authorize(PallidSturgeonH.GetYears, PUBLIC)) + + e.GET(urlContext+"/siteDataEntry", auth.Authorize(PallidSturgeonH.GetSiteDataEntries, PUBLIC)) + e.POST(urlContext+"/siteDataEntry", auth.Authorize(PallidSturgeonH.SaveSiteDataEntry, PUBLIC)) + e.PUT(urlContext+"/siteDataEntry", auth.Authorize(PallidSturgeonH.UpdateSiteDataEntry, PUBLIC)) + e.GET(urlContext+"/fishDataEntry", auth.Authorize(PallidSturgeonH.GetFishDataEntries, PUBLIC)) + e.POST(urlContext+"/fishDataEntry", auth.Authorize(PallidSturgeonH.SaveFishDataEntry, PUBLIC)) + e.PUT(urlContext+"/fishDataEntry", auth.Authorize(PallidSturgeonH.UpdateFishDataEntry, PUBLIC)) + e.DELETE(urlContext+"/fishDataEntry/:id", auth.Authorize(PallidSturgeonH.DeleteFishDataEntry, PUBLIC)) + e.GET(urlContext+"/moriverDataEntry", auth.Authorize(PallidSturgeonH.GetMoriverDataEntries, PUBLIC)) + e.POST(urlContext+"/moriverDataEntry", auth.Authorize(PallidSturgeonH.SaveMoriverDataEntry, PUBLIC)) + e.PUT(urlContext+"/moriverDataEntry", auth.Authorize(PallidSturgeonH.UpdateMoriverDataEntry, PUBLIC)) + e.GET(urlContext+"/supplementalDataEntry", auth.Authorize(PallidSturgeonH.GetSupplementalDataEntries, PUBLIC)) + e.POST(urlContext+"/supplementalDataEntry", auth.Authorize(PallidSturgeonH.SaveSupplementalDataEntry, PUBLIC)) + e.PUT(urlContext+"/supplementalDataEntry", auth.Authorize(PallidSturgeonH.UpdateSupplementalDataEntry, PUBLIC)) + e.DELETE(urlContext+"/supplementalDataEntry/:id", auth.Authorize(PallidSturgeonH.DeleteSupplementalDataEntry, PUBLIC)) + e.GET(urlContext+"/searchDataEntry", auth.Authorize(PallidSturgeonH.GetSearchDataEntries, PUBLIC)) + e.POST(urlContext+"/searchDataEntry", auth.Authorize(PallidSturgeonH.SaveSearchDataEntry, PUBLIC)) + e.PUT(urlContext+"/searchDataEntry", auth.Authorize(PallidSturgeonH.UpdateSearchDataEntry, PUBLIC)) + e.GET(urlContext+"/telemetryDataEntry", auth.Authorize(PallidSturgeonH.GetTelemetryDataEntries, PUBLIC)) + 
e.POST(urlContext+"/telemetryDataEntry", auth.Authorize(PallidSturgeonH.SaveTelemetryDataEntry, PUBLIC)) + e.PUT(urlContext+"/telemetryDataEntry", auth.Authorize(PallidSturgeonH.UpdateTelemetryDataEntry, PUBLIC)) + e.DELETE(urlContext+"/telemetryDataEntry/:id", auth.Authorize(PallidSturgeonH.DeleteTelemetryDataEntry, PUBLIC)) + e.GET(urlContext+"/procedureDataEntry", auth.Authorize(PallidSturgeonH.GetProcedureDataEntries, PUBLIC)) + e.POST(urlContext+"/procedureDataEntry", auth.Authorize(PallidSturgeonH.SaveProcedureDataEntry, PUBLIC)) + e.PUT(urlContext+"/procedureDataEntry", auth.Authorize(PallidSturgeonH.UpdateProcedureDataEntry, PUBLIC)) + e.DELETE(urlContext+"/procedureDataEntry/:id", auth.Authorize(PallidSturgeonH.DeleteProcedureDataEntry, PUBLIC)) + + e.GET(urlContext+"/fishFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullFishDataSummary, PUBLIC)) + e.GET(urlContext+"/fishDataSummary", auth.Authorize(PallidSturgeonH.GetFishDataSummary, PUBLIC)) + e.GET(urlContext+"/suppFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullSuppDataSummary, PUBLIC)) + e.GET(urlContext+"/suppDataSummary", auth.Authorize(PallidSturgeonH.GetSuppDataSummary, PUBLIC)) + e.GET(urlContext+"/missouriFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullMissouriDataSummary, PUBLIC)) + e.GET(urlContext+"/missouriDataSummary", auth.Authorize(PallidSturgeonH.GetMissouriDataSummary, PUBLIC)) + e.GET(urlContext+"/geneticFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullGeneticDataSummary, PUBLIC)) + e.GET(urlContext+"/geneticDataSummary", auth.Authorize(PallidSturgeonH.GetGeneticDataSummary, PUBLIC)) + e.GET(urlContext+"/searchFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullSearchDataSummary, PUBLIC)) + e.GET(urlContext+"/searchDataSummary", auth.Authorize(PallidSturgeonH.GetSearchDataSummary, PUBLIC)) + e.GET(urlContext+"/telemetryFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullTelemetryDataSummary, PUBLIC)) + e.GET(urlContext+"/telemetryDataSummary", 
auth.Authorize(PallidSturgeonH.GetTelemetryDataSummary, PUBLIC)) + e.GET(urlContext+"/procedureFullDataSummary", auth.Authorize(PallidSturgeonH.GetFullProcedureDataSummary, PUBLIC)) + e.GET(urlContext+"/procedureDataSummary", auth.Authorize(PallidSturgeonH.GetProcedureDataSummary, PUBLIC)) + e.GET(urlContext+"/missouriDatasheets", auth.Authorize(PallidSturgeonH.GetMissouriDatasheetById, PUBLIC)) + e.GET(urlContext+"/searchDatasheets", auth.Authorize(PallidSturgeonH.GetSearchDatasheetById, PUBLIC)) + + e.GET(urlContext+"/uploadSessionId", auth.Authorize(PallidSturgeonH.GetUploadSessionId, PUBLIC)) + e.POST(urlContext+"/upload", auth.Authorize(PallidSturgeonH.Upload, PUBLIC)) + e.POST(urlContext+"/storeProcedure/:uploadSessionId", auth.Authorize(PallidSturgeonH.CallStoreProcedures, PUBLIC)) + + e.GET(urlContext+"/errorCount", auth.Authorize(PallidSturgeonH.GetErrorCount, PUBLIC)) + e.GET(urlContext+"/officeErrorLog", auth.Authorize(PallidSturgeonH.GetOfficeErrorLogs, PUBLIC)) + e.GET(urlContext+"/usgNoVialNumbers", auth.Authorize(PallidSturgeonH.GetUsgNoVialNumbers, PUBLIC)) + e.GET(urlContext+"/unapprovedDataSheets", auth.Authorize(PallidSturgeonH.GetUnapprovedDataSheets, PUBLIC)) + e.GET(urlContext+"/bafiDataSheets", auth.Authorize(PallidSturgeonH.GetBafiDataSheets, PUBLIC)) + e.GET(urlContext+"/uncheckedDataSheets", auth.Authorize(PallidSturgeonH.GetUncheckedDataSheets, PUBLIC)) + e.POST(urlContext+"/uploadDownloadZip", auth.Authorize(PallidSturgeonH.UploadDownloadZip, PUBLIC)) + + e.GET(urlContext+"/downloadInfo", auth.Authorize(PallidSturgeonH.GetDownloadInfo, PUBLIC)) + e.GET(urlContext+"/downloadZip", auth.Authorize(PallidSturgeonH.GetDownloadZip, PUBLIC)) + e.GET(urlContext+"/uploadSessionLogs", auth.Authorize(PallidSturgeonH.GetUploadSessionLogs, PUBLIC)) + + e.GET(urlContext+"/export/sites", auth.Authorize(PallidSturgeonH.GetSitesExport, PUBLIC)) + + e.GET(urlContext+"/userRoleOffices/:email", auth.Authorize(userH.GetUserRoleOffices, PUBLIC)) + 
e.GET(urlContext+"/userRoleOffice/:id", auth.Authorize(userH.GetUserRoleOfficeById, PUBLIC))
+	e.GET(urlContext+"/userAccessRequests", auth.Authorize(userH.GetUserAccessRequests, ADMIN))
+	e.GET(urlContext+"/users", auth.Authorize(userH.GetUsers, PUBLIC))
+	e.GET(urlContext+"/userList", auth.Authorize(userH.GetUsers2, ADMIN))
+	e.POST(urlContext+"/userRoleOffice", auth.Authorize(userH.AddUserRoleOffice, ADMIN))
+	e.PUT(urlContext+"/userRoleOffice", auth.Authorize(userH.UpdateUserRoleOffice, PUBLIC))
+
+	// Fatal logs the startup error and exits non-zero if the server fails to start;
+	// Debug would swallow bind/listen failures and let the process exit 0 silently.
+	e.Logger.Fatal(e.Start(":8080"))
 }
diff --git a/stores/authstore.go b/stores/authstore.go
index e7fbfa9..657a1ff 100644
--- a/stores/authstore.go
+++ b/stores/authstore.go
@@ -1,20 +1,24 @@
 package stores
 
 import (
-	"database/sql"
 	"fmt"
 	"log"
 
-	"di2e.net/cwbi/pallid_sturgeon_api/server/config"
-	"di2e.net/cwbi/pallid_sturgeon_api/server/models"
+	//"net/smtp"
+
+	"github.com/USACE/pallid_sturgeon_api/server/config"
+	"github.com/USACE/pallid_sturgeon_api/server/models"
 	"github.com/jmoiron/sqlx"
 )
 
 type AuthStore struct {
 	db *sqlx.DB
+	//config *config.AppConfig
 }
 
-var userSql = "select * from users where user_id=$1"
+var userSql = `select id, edipi, username, email, first_name,last_name from users_t where email=:1`
+
+var userByIdSql = `select id, edipi, username, email, first_name,last_name from users_t where id=:1`
 
 // var userSql = `select id,username,email,rate,
 // (select bool_or(is_admin)
@@ -23,45 +27,344 @@ var userSql = "select * from users where user_id=$1"
 // from wpt_user
 // where id=$1 and deleted=false`
 
-var insertUserSql = "insert into users (user_id,user_name,email) values ($1,$2,$3)"
+var insertUserSql = `insert into users_t (username,email,first_name,last_name,edipi) values (:1,:2,:3,:4,:5)`
+
+var getUsersSql = `select uro.id, uro.user_id, u.username, u.first_name, u.last_name, u.email, uro.role_id, r.description, uro.office_id, f.field_office_code, uro.project_code from users_t u
+	inner join 
user_role_office_lk uro on uro.user_id = u.id + inner join role_lk r on r.id = uro.role_id + inner join field_office_lk f on f.fo_id = uro.office_id order by u.last_name` + +var getUsersByRoleTypeSql = `select u.id, u.username, u.first_name, u.last_name, u.email, r.description from users_t u + inner join user_role_office_lk uro on uro.user_id = u.id + inner join role_lk r on r.id = uro.role_id + where r.description = :1` + +var getUserAccessRequestSql = `select id, username, first_name, last_name, email from users_t where id not in (select user_id from user_role_office_lk) order by last_name` + +var insertUserRoleOfficeSql = `insert into user_role_office_lk (id,user_id,role_id,office_id,project_code) values (user_role_office_seq.nextval,:1,:2,:3,:4)` + +var updateUserRoleOfficesSql = `update user_role_office_lk set role_id = :2, office_id = :3, project_code = :4 where id = :1` + +var getUserRoleOfficeSql = `select uro.id, uro.user_id, uro.role_id, uro.office_id, r.description, f.FIELD_OFFICE_CODE, project_code from user_role_office_lk uro + inner join users_t u on u.id = uro.user_id + inner join role_lk r on r.id = uro.role_id + inner join field_office_lk f on f.fo_id = uro.office_id + where u.email = :1` + +var getUserRoleOfficeByIdSql = `select uro.id, uro.user_id, uro.role_id, uro.office_id, r.description, f.FIELD_OFFICE_CODE, project_code from user_role_office_lk uro + inner join users_t u on u.id = uro.user_id + inner join role_lk r on r.id = uro.role_id + inner join field_office_lk f on f.fo_id = uro.office_id + where uro.id = :1` + +// Fetch List of Unique Users +var getUsersSql2 = `select distinct uro.user_id, u.username, u.first_name, u.last_name, uro.role_id from users_t u +inner join user_role_office_lk uro on uro.user_id = u.id` func InitAuthStore(appConfig *config.AppConfig) (*AuthStore, error) { - dburl := fmt.Sprintf("user=%s password=%s host=%s port=%s database=%s sslmode=disable", - appConfig.Dbuser, appConfig.Dbpass, appConfig.Dbhost, 
appConfig.Dbport, appConfig.Dbname)
-	con, err := sqlx.Connect("pgx", dburl)
+	connectString := fmt.Sprintf("%s:%s/%s", appConfig.Dbhost, appConfig.Dbport, appConfig.Dbname)
+	db, err := sqlx.Connect(
+		"godror",
+		"user="+appConfig.Dbuser+" password="+appConfig.Dbpass+" connectString="+connectString+" poolMaxSessions=100 poolSessionMaxLifetime=2m0s",
+	)
 	if err != nil {
-		log.Printf("Unable to connect to the authentication store: %s", err)
+		log.Printf("[InitAuthStore] m=GetDb,msg=connection has failed: %s", err)
 		return nil, err
 	}
+	db.SetMaxIdleConns(0) // NOTE(review): moved below the err check — db is nil when Connect fails and this call would panic
-	con.SetMaxOpenConns(10)
-	adb := AuthStore{
-		db: con,
+
+	ss := AuthStore{
+		db: db,
+		//config: appConfig,
 	}
-	return &adb, nil
+
+	return &ss, nil
 }
 
 func (auth *AuthStore) GetUserFromJwt(jwtClaims models.JwtClaim) (models.User, error) {
 	user := models.User{}
-	err := auth.db.Get(&user, userSql, jwtClaims.Sub)
-	if err != nil {
-		if err == sql.ErrNoRows {
-			user = models.User{
-				UserID:   jwtClaims.Sub,
-				UserName: jwtClaims.Name,
-				Email:    jwtClaims.Email,
-				Deleted:  false,
-			}
-			err := auth.AddUser(user)
-			return user, err
-		} else {
+	countQuery, err := auth.db.Prepare(userSql)
+	if err != nil {
+		return user, err
+	}
+
+	countrows, err := countQuery.Query(jwtClaims.Email)
+	if err != nil {
+		return user, err
+	}
+	defer countrows.Close() // NOTE(review): moved up so rows are closed on every return path (scan error, count==0), not only the happy path
+	count := 0
+	for countrows.Next() {
+		err = countrows.Scan(&user.ID, &user.CacUid, &user.UserName, &user.Email, &user.FirstName, &user.LastName)
+		if err != nil {
 			return user, err
 		}
+		count += 1
+	}
+
+	if count == 0 {
+		user = models.User{
+			UserName:  jwtClaims.Email,
+			Email:     jwtClaims.Email,
+			FirstName: jwtClaims.FirstName,
+			LastName:  jwtClaims.LastName,
+			CacUid:    jwtClaims.CacUid,
+		}
+		err := auth.AddUser(user)
+		return user, err
 	}
 	return user, err
 }
 
 func (auth *AuthStore) AddUser(user models.User) error {
-	_, err := auth.db.Exec(insertUserSql, user.UserID, user.UserName, user.Email)
+	_, err := auth.db.Exec(insertUserSql, user.UserName, user.Email, user.FirstName, 
user.LastName, user.CacUid) + return err +} + +func (auth *AuthStore) GetUserAccessRequests() ([]models.User, error) { + users := []models.User{} + + rows, err := auth.db.Query(getUserAccessRequestSql) + if err != nil { + return users, err + } + + for rows.Next() { + user := models.User{} + err = rows.Scan(&user.ID, &user.UserName, &user.FirstName, &user.LastName, &user.Email) + if err != nil { + return users, err + } + users = append(users, user) + } + defer rows.Close() + + return users, err +} + +func (auth *AuthStore) GetUsers() ([]models.User, error) { + users := []models.User{} + + rows, err := auth.db.Query(getUsersSql) + if err != nil { + return users, err + } + + for rows.Next() { + user := models.User{} + err = rows.Scan(&user.ID, &user.UserID, &user.UserName, &user.FirstName, &user.LastName, &user.Email, &user.RoleID, &user.Role, &user.OfficeID, &user.OfficeCode, &user.ProjectCode) + if err != nil { + return users, err + } + users = append(users, user) + } + defer rows.Close() + + return users, err +} + +func (auth *AuthStore) UpdateUserRoleOffice(userRoleOffice models.UserRoleOffice) error { + _, err := auth.db.Exec(updateUserRoleOfficesSql, userRoleOffice.RoleID, userRoleOffice.OfficeID, userRoleOffice.ProjectCode, userRoleOffice.ID) + + return err +} + +func (auth *AuthStore) GetUserById(id int) (models.User, error) { + user := models.User{} + + countQuery, err := auth.db.Prepare(userByIdSql) + if err != nil { + return user, err + } + + countrows, err := countQuery.Query(id) + if err != nil { + return user, err + } + count := 0 + for countrows.Next() { + err = countrows.Scan(&user.ID, &user.CacUid, &user.UserName, &user.Email, &user.FirstName, &user.LastName) + if err != nil { + return user, err + } + count += 1 + } + + defer countrows.Close() + return user, err +} + +func (auth *AuthStore) GetUsersByRoleType(roleType string) ([]models.User, error) { + users := []models.User{} + + selectQuery, err := auth.db.Prepare(getUsersByRoleTypeSql) + if err != 
nil { + return users, err + } + + rows, err := selectQuery.Query(roleType) + if err != nil { + return users, err + } + + for rows.Next() { + user := models.User{} + err = rows.Scan(&user.ID, &user.UserName, &user.FirstName, &user.LastName, &user.Email, &user.Role) + if err != nil { + return users, err + } + users = append(users, user) + } + defer rows.Close() + + return users, err +} + +func (auth *AuthStore) AddUserRoleOffice(userRoleOffice models.UserRoleOffice) error { + _, err := auth.db.Exec(insertUserRoleOfficeSql, userRoleOffice.UserID, userRoleOffice.RoleID, userRoleOffice.OfficeID, userRoleOffice.ProjectCode) + // if err == nil { + // message := []byte("Your role request has been approved.") + + // user, userErr := auth.GetUserById(userRoleOffice.UserID) + // if userErr != nil { + // log.Print("Unable to send email.", userErr) + // } + // to := []string{ + // user.Email, + // } + + // from := auth.config.EmailFrom + // emailErr := auth.SendEmail(message, to, from) + // if emailErr != nil { + // log.Print("Unable to send email.", emailErr) + // } + // } + return err } + +func (auth *AuthStore) GetUserRoleOffices(email string) ([]models.UserRoleOffice, error) { + userRoleOffices := []models.UserRoleOffice{} + selectQuery, err := auth.db.Prepare(getUserRoleOfficeSql) + if err != nil { + return userRoleOffices, err + } + + rows, err := selectQuery.Query(email) + if err != nil { + return userRoleOffices, err + } + defer rows.Close() + + for rows.Next() { + userRoleOffice := models.UserRoleOffice{} + err = rows.Scan(&userRoleOffice.ID, &userRoleOffice.UserID, &userRoleOffice.RoleID, &userRoleOffice.OfficeID, &userRoleOffice.Role, &userRoleOffice.OfficeCode, &userRoleOffice.ProjectCode) + if err != nil { + return userRoleOffices, err + } + userRoleOffices = append(userRoleOffices, userRoleOffice) + } + + // if userRoleOffice.OfficeCode == "" { + // message := []byte("There is a new user role request. 
Please login to appove or deny the request.") + // users, adminUserLoadErr := auth.GetUsersByRoleType("ADMINISTRATOR") + // if adminUserLoadErr != nil { + // log.Print("Unable to send email.", adminUserLoadErr) + // } + + // to := make([]string, 0) + // for _, user := range users { + // to = append(to, user.Email) + // } + + // from := auth.config.EmailFrom + // emailErr := auth.SendEmail(message, to, from) + // if emailErr != nil { + // log.Print("Unable to send email.", emailErr) + // } + // } + + return userRoleOffices, err +} + +func (auth *AuthStore) GetUserRoleOfficeById(id string) (models.UserRoleOffice, error) { + userRoleOffice := models.UserRoleOffice{} + selectQuery, err := auth.db.Prepare(getUserRoleOfficeByIdSql) + if err != nil { + return userRoleOffice, err + } + + rows, err := selectQuery.Query(id) + if err != nil { + return userRoleOffice, err + } + + for rows.Next() { + err = rows.Scan(&userRoleOffice.ID, &userRoleOffice.UserID, &userRoleOffice.RoleID, &userRoleOffice.OfficeID, &userRoleOffice.Role, &userRoleOffice.OfficeCode, &userRoleOffice.ProjectCode) + if err != nil { + return userRoleOffice, err + } + } + defer rows.Close() + + return userRoleOffice, err +} + +func (auth *AuthStore) GetUserRoleOffice(email string) (models.UserRoleOffice, error) { + userRoleOffice := models.UserRoleOffice{} + selectQuery, err := auth.db.Prepare(getUserRoleOfficeSql) + if err != nil { + return userRoleOffice, err + } + + rows, err := selectQuery.Query(email) + if err != nil { + return userRoleOffice, err + } + + for rows.Next() { + err = rows.Scan(&userRoleOffice.ID, &userRoleOffice.UserID, &userRoleOffice.RoleID, &userRoleOffice.OfficeID, &userRoleOffice.Role, &userRoleOffice.OfficeCode, &userRoleOffice.ProjectCode) + if err != nil { + return userRoleOffice, err + } + } + defer rows.Close() + + return userRoleOffice, err +} + +// func (s *AuthStore) SendEmail(message []byte, to []string, from string) error { +// // Authentication. 
+// auth := smtp.PlainAuth("", from, s.config.EmailPassword, s.config.SmtpHost) + +// // Sending email. +// err := smtp.SendMail(s.config.SmtpHost+":"+s.config.SmtpPort, auth, from, to, message) +// if err != nil { +// fmt.Println(err) +// return err +// } +// fmt.Println("Email Sent Successfully!") +// return nil +// } + +func (auth *AuthStore) GetUsers2() ([]models.User, error) { + rows, err := auth.db.Query(getUsersSql2) + + users := []models.User{} + if err != nil { + return users, err + } + defer rows.Close() + + for rows.Next() { + user := models.User{} + err = rows.Scan(&user.UserID, &user.UserName, &user.FirstName, &user.LastName, &user.RoleID) + if err != nil { + return nil, err + } + users = append(users, user) + } + + return users, err +} diff --git a/stores/pallidsturgeonstore.go b/stores/pallidsturgeonstore.go index 71906fc..84c31eb 100644 --- a/stores/pallidsturgeonstore.go +++ b/stores/pallidsturgeonstore.go @@ -2,39 +2,3191 @@ package stores import ( "database/sql" + "encoding/csv" + "errors" + "fmt" + "io/ioutil" + "log" + "mime/multipart" + "os" + "strconv" + "strings" + "time" - "di2e.net/cwbi/pallid_sturgeon_api/server/config" - "di2e.net/cwbi/pallid_sturgeon_api/server/models" + "github.com/USACE/pallid_sturgeon_api/server/config" + "github.com/USACE/pallid_sturgeon_api/server/models" "github.com/godror/godror" + "github.com/jmoiron/sqlx" ) type PallidSturgeonStore struct { - db *sql.DB + db *sqlx.DB config *config.AppConfig } +func processTimeString(st interface{}) (string, error) { + var t string + if st == nil { + return "", errors.New("Input is nil") + } + s, okay := st.(string) + if !okay { + return "", errors.New("Input is not a string") + } + test, err := time.Parse("2006-01-02 15:04:05 -0700 MST", s) + formattedTime := test.Format("2006-01-02") + + if err != nil { + return "", err + } + t = formattedTime + return t, nil +} +var getUserSql = `select u.id, u.username, u.first_name, u.last_name, u.email, r.description, f.FIELD_OFFICE_CODE, 
project_code from users_t u + inner join user_role_office_lk uro on uro.user_id = u.id + inner join role_lk r on r.id = uro.role_id + inner join field_office_lk f on f.fo_id = uro.office_id + where email = :1` + +var getUserRoleByIdSql = `select uro.id, uro.user_id, uro.role_id, uro.office_id, r.description, f.FIELD_OFFICE_CODE, project_code from user_role_office_lk uro + inner join users_t u on u.id = uro.user_id + inner join role_lk r on r.id = uro.role_id + inner join field_office_lk f on f.fo_id = uro.office_id + where uro.id = :1` + +func (s *PallidSturgeonStore) GetUser(email string) (models.User, error) { + user := models.User{} + selectQuery, err := s.db.Prepare(getUserSql) + if err != nil { + return user, err + } + + rows, err := selectQuery.Query(email) + if err != nil { + return user, err + } + defer rows.Close() + + for rows.Next() { + err = rows.Scan(&user.ID, &user.UserName, &user.FirstName, &user.LastName, &user.Email, &user.Role, &user.OfficeCode, &user.ProjectCode) + if err != nil { + return user, err + } + } + + return user, err +} + +func (s *PallidSturgeonStore) GetUserRoleById(id string) (models.UserRoleOffice, error) { + userRoleOffice := models.UserRoleOffice{} + selectQuery, err := s.db.Prepare(getUserRoleByIdSql) + if err != nil { + return userRoleOffice, err + } + + rows, err := selectQuery.Query(id) + if err != nil { + return userRoleOffice, err + } + + for rows.Next() { + err = rows.Scan(&userRoleOffice.ID, &userRoleOffice.UserID, &userRoleOffice.RoleID, &userRoleOffice.OfficeID, &userRoleOffice.Role, &userRoleOffice.OfficeCode, &userRoleOffice.ProjectCode) + if err != nil { + return userRoleOffice, err + } + } + defer rows.Close() + + return userRoleOffice, err +} + +var getProjectsSql = `select distinct p.* from project_lk p + join fieldoffice_segment_v v + on v.PROJECT_CODE = p.project_code + and v.FIELD_OFFICE_CODE = :1 + order by p.project_code` + +func (s *PallidSturgeonStore) GetProjects(fieldOfficeCode string) ([]models.Project, 
error) { + projects := []models.Project{} + selectQuery, err := s.db.Prepare(getProjectsSql) + if err != nil { + return projects, err + } + + rows, err := selectQuery.Query(fieldOfficeCode) + if err != nil { + return projects, err + } + defer rows.Close() + + for rows.Next() { + project := models.Project{} + err = rows.Scan(&project.Code, &project.Description) + if err != nil { + return nil, err + } + projects = append(projects, project) + } + + return projects, err +} + +// For Data Summaries Project Filter +var getProjectOneSql = `select * from project_lk where project_code <> 2` +var getProjectTwoSql = `select * from project_lk where project_code = 2` + +func (s *PallidSturgeonStore) GetProjectsFilter(project string) ([]models.Project, error) { + projects := []models.Project{} + query := "" + + if project == "1" { + query = getProjectOneSql + } + + if project == "2" { + query = getProjectTwoSql + } + + selectQuery, err := s.db.Prepare(query) + if err != nil { + return projects, err + } + + rows, err := selectQuery.Query() + if err != nil { + return projects, err + } + defer rows.Close() + + for rows.Next() { + project := models.Project{} + err = rows.Scan(&project.Code, &project.Description) + if err != nil { + return nil, err + } + projects = append(projects, project) + } + + return projects, err +} + +func (s *PallidSturgeonStore) GetRoles() ([]models.Role, error) { + roles := []models.Role{} + rows, err := s.db.Query("select * from role_lk order by id") + if err != nil { + return roles, err + } + + for rows.Next() { + role := models.Role{} + err = rows.Scan(&role.ID, &role.Description) + if err != nil { + return nil, err + } + roles = append(roles, role) + } + defer rows.Close() + + return roles, err +} + +var getFieldOfficesSql = `select FO_ID, FIELD_OFFICE_CODE, FIELD_OFFICE_DESCRIPTION, STATE from field_office_lk where field_office_code <> 'ZZ' order by FO_ID` + +var getFieldOfficeSqlAll = `select FO_ID, FIELD_OFFICE_CODE, FIELD_OFFICE_DESCRIPTION, STATE 
from field_office_lk order by FO_ID` + +func (s *PallidSturgeonStore) GetFieldOffices(showAll string) ([]models.FieldOffice, error) { + query := "" + + if showAll == "true" { + query = getFieldOfficeSqlAll + } else { + query = getFieldOfficesSql + } + + fieldOffices := []models.FieldOffice{} + rows, err := s.db.Query(query) + if err != nil { + return fieldOffices, err + } + defer rows.Close() + + for rows.Next() { + fieldOffice := models.FieldOffice{} + err = rows.Scan(&fieldOffice.ID, &fieldOffice.Code, &fieldOffice.Description, &fieldOffice.State) + if err != nil { + return nil, err + } + fieldOffices = append(fieldOffices, fieldOffice) + } + + return fieldOffices, err +} + +func (s *PallidSturgeonStore) GetSampleUnitTypes() ([]models.SampleUnitType, error) { + rows, err := s.db.Query("select * from sample_unit_type_lk where sample_unit_type_code <> 'A' order by SAMPLE_UNIT_TYPE_CODE") + + sampleUnitTypes := []models.SampleUnitType{} + if err != nil { + return sampleUnitTypes, err + } + defer rows.Close() + + for rows.Next() { + sampleUnitType := models.SampleUnitType{} + err = rows.Scan(&sampleUnitType.Code, &sampleUnitType.Description) + if err != nil { + return nil, err + } + sampleUnitTypes = append(sampleUnitTypes, sampleUnitType) + } + + return sampleUnitTypes, err +} + +var getSeasonsSql = `select distinct s.s_id,si.SEASON, s.season_description from ds_sites si inner join table (pallid_data_entry_api.data_entry_site_fnc(:1,:2,:3,null,null,null)) +fnc on si.site_id = fnc.site_id inner join season_lk s on s.season_code = si.season` + +func (s *PallidSturgeonStore) GetSeasons(year string, officeCode string, projectCode string) ([]models.Season, error) { + seasons := []models.Season{} + + selectQuery, err := s.db.Prepare(getSeasonsSql) + if err != nil { + return seasons, err + } + + rows, err := selectQuery.Query(year, officeCode, projectCode) + if err != nil { + return seasons, err + } + defer rows.Close() + + for rows.Next() { + season := models.Season{} + 
err = rows.Scan(&season.ID, &season.Code, &season.Description) + if err != nil { + return nil, err + } + seasons = append(seasons, season) + } + + return seasons, err +} + +var getSegmentsSql = `select distinct segment_code as code, segment_description as description from fieldoffice_segment_v where (CASE when :1 != 'ZZ' THEN field_office_code ELSE :2 END) = :3 and project_code = :4 order by code asc` + +func (s *PallidSturgeonStore) GetSegments(officeCode string, projectCode string) ([]models.Segment, error) { + segments := []models.Segment{} + + selectQuery, err := s.db.Prepare(getSegmentsSql) + if err != nil { + return segments, err + } + + rows, err := selectQuery.Query(officeCode, officeCode, officeCode, projectCode) + if err != nil { + return segments, err + } + defer rows.Close() + + for rows.Next() { + segment := models.Segment{} + err = rows.Scan(&segment.Code, &segment.Description) + if err != nil { + return nil, err + } + segments = append(segments, segment) + } + + return segments, err +} + +var getSampleUnitSql = `select sample_unit, sample_unit_desc as description from segment_sampleunit_v where segment_code = :1 and sample_unit_type = :2 order by 1` + +func (s *PallidSturgeonStore) GetSampleUnit(sampleUnitType string, segmentCode string) ([]models.SampleUnit, error) { + sampleUnits := []models.SampleUnit{} + + selectQuery, err := s.db.Prepare(getSampleUnitSql) + if err != nil { + return sampleUnits, err + } + + rows, err := selectQuery.Query(segmentCode, sampleUnitType) + if err != nil { + return sampleUnits, err + } + defer rows.Close() + + for rows.Next() { + sampleUnit := models.SampleUnit{} + err = rows.Scan(&sampleUnit.SampleUnit, &sampleUnit.Description) + if err != nil { + return nil, err + } + sampleUnits = append(sampleUnits, sampleUnit) + } + + return sampleUnits, err +} + +func (s *PallidSturgeonStore) GetBendRn() ([]models.BendRn, error) { + rows, err := s.db.Query("select bs_id, BEND_SELECTION_DESCRIPTION, BEND_SELECTION_CODE from 
BEND_SELECTION_LK order by 1 desc") + + bendRnsItems := []models.BendRn{} + if err != nil { + return bendRnsItems, err + } + defer rows.Close() + + for rows.Next() { + bendRn := models.BendRn{} + err = rows.Scan(&bendRn.ID, &bendRn.Description, &bendRn.Code) + if err != nil { + return nil, err + } + bendRnsItems = append(bendRnsItems, bendRn) + } + + return bendRnsItems, err +} + +func (s *PallidSturgeonStore) GetMeso(macro string) ([]models.Meso, error) { + rows, err := s.db.Query("select MESOHABITAT_CODE from macro_meso_lk where MACROHABITAT_CODE = :1 order by 1 asc", macro) + + mesoItems := []models.Meso{} + if err != nil { + return mesoItems, err + } + defer rows.Close() + + for rows.Next() { + meso := models.Meso{} + err = rows.Scan(&meso.Code) + if err != nil { + return nil, err + } + mesoItems = append(mesoItems, meso) + } + + return mesoItems, err +} + +func (s *PallidSturgeonStore) GetStructureFlow(microStructure string) ([]models.StructureFlow, error) { + rows, err := s.db.Query("select structure_flow, structure_flow_code from micro_habitat_desc_lk where micro_structure_code = :1 group by structure_flow, structure_flow_code", microStructure) + + structureFlowItems := []models.StructureFlow{} + if err != nil { + return structureFlowItems, err + } + defer rows.Close() + + for rows.Next() { + structureFlow := models.StructureFlow{} + err = rows.Scan(&structureFlow.Code, &structureFlow.ID) + if err != nil { + return nil, err + } + structureFlowItems = append(structureFlowItems, structureFlow) + } + + return structureFlowItems, err +} + +func (s *PallidSturgeonStore) GetStructureMod(structureFlow string) ([]models.StructureMod, error) { + rows, err := s.db.Query("select structure_mod, structure_mod_code from micro_habitat_desc_lk where structure_flow_code = :1 group by structure_mod, structure_mod_code", structureFlow) + + structureModItems := []models.StructureMod{} + if err != nil { + return structureModItems, err + } + defer rows.Close() + + for rows.Next() 
{
		structureMod := models.StructureMod{}
		err = rows.Scan(&structureMod.Description, &structureMod.Code)
		if err != nil {
			return nil, err
		}
		structureModItems = append(structureModItems, structureMod)
	}

	// rows.Next returning false may mean "done" or "iteration failed";
	// only rows.Err distinguishes the two.
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return structureModItems, nil
}

// GetSpecies returns all species alpha codes from the fish_code_lk
// lookup table, ordered alphabetically.
func (s *PallidSturgeonStore) GetSpecies() ([]models.Species, error) {
	rows, err := s.db.Query("select alpha_code from fish_code_lk order by 1 asc")
	speciesItems := []models.Species{}
	if err != nil {
		return speciesItems, err
	}
	defer rows.Close()

	for rows.Next() {
		species := models.Species{}
		if err = rows.Scan(&species.Code); err != nil {
			return nil, err
		}
		speciesItems = append(speciesItems, species)
	}
	// Surface any error hit while walking the cursor.
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return speciesItems, nil
}

// GetFtPrefixes returns all floy-tag prefix codes from
// floy_tag_prefix_code_lk, ordered alphabetically.
func (s *PallidSturgeonStore) GetFtPrefixes() ([]models.FtPrefix, error) {
	rows, err := s.db.Query("select tag_prefix_code from floy_tag_prefix_code_lk order by 1 asc")
	ftPrefixItems := []models.FtPrefix{}
	if err != nil {
		return ftPrefixItems, err
	}
	defer rows.Close()

	for rows.Next() {
		ftPrefix := models.FtPrefix{}
		if err = rows.Scan(&ftPrefix.Code); err != nil {
			return nil, err
		}
		ftPrefixItems = append(ftPrefixItems, ftPrefix)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return ftPrefixItems, nil
}

// GetMr returns mark/recapture codes and their descriptions from
// mark_recapture_lk, ordered by code.
func (s *PallidSturgeonStore) GetMr() ([]models.Mr, error) {
	rows, err := s.db.Query("select mark_recapture_code, mark_recapture_description from mark_recapture_lk order by 1 asc")
	mrItems := []models.Mr{}
	if err != nil {
		return mrItems, err
	}
	defer rows.Close()

	for rows.Next() {
		mr := models.Mr{}
		if err = rows.Scan(&mr.Code, &mr.Description); err != nil {
			return nil, err
		}
		mrItems = append(mrItems, mr)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return mrItems, nil
}

// GetOtolith returns fish-structure codes and descriptions from
// fish_structure_lk, ordered by code.
func (s *PallidSturgeonStore) GetOtolith() ([]models.Otolith, error) {
	rows, err := s.db.Query("select code, description from fish_structure_lk order by 1 asc")
	otolithItems := []models.Otolith{}
	if err != nil {
		return otolithItems, err
	}
	defer rows.Close()

	for rows.Next() {
		otolith := models.Otolith{}
		if err = rows.Scan(&otolith.Code, &otolith.Description); err != nil {
			return nil, err
		}
		otolithItems = append(otolithItems, otolith)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return otolithItems, nil
}

// GetSetSite1 returns the distinct first-level set-site options available
// for the given micro-structure code.
func (s *PallidSturgeonStore) GetSetSite1(microstructure string) ([]models.SetSite1, error) {
	rows, err := s.db.Query("select set_site_1, set_site_1_code from micro_set_site_lk where structure_code=:1 group by set_site_1, set_site_1_code order by set_site_1_code asc", microstructure)
	setSiteItems := []models.SetSite1{}
	if err != nil {
		return setSiteItems, err
	}
	defer rows.Close()

	for rows.Next() {
		setSite := models.SetSite1{}
		if err = rows.Scan(&setSite.Description, &setSite.Code); err != nil {
			return nil, err
		}
		setSiteItems = append(setSiteItems, setSite)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return setSiteItems, nil
}

// GetSetSite2 returns the distinct second-level set-site options for the
// given first-level code, excluding 'SIDE NOTCH' (filtered in SQL).
func (s *PallidSturgeonStore) GetSetSite2(setsite1 string) ([]models.SetSite2, error) {
	rows, err := s.db.Query("select set_site_two, set_site_two_code from micro_set_site_lk where set_site_1_code=:1 and set_site_two != 'SIDE NOTCH' group by set_site_two, set_site_two_code order by set_site_two_code asc", setsite1)
	setSiteItems := []models.SetSite2{}
	if err != nil {
		return setSiteItems, err
	}
	defer rows.Close()

	for rows.Next() {
		setSite := models.SetSite2{}
		if err = rows.Scan(&setSite.Description, &setSite.Code); err != nil {
			return nil, err
		}
		setSiteItems = append(setSiteItems, setSite)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return setSiteItems, nil
}

// GetYears returns all years from year_lk, most recent first.
func (s *PallidSturgeonStore) GetYears() ([]models.Year, error) {
	rows, err := s.db.Query("select year from year_lk order by year desc")
	items := []models.Year{}
	if err != nil {
		return items, err
	}
	defer rows.Close()

	for rows.Next() {
		year := models.Year{}
		if err = rows.Scan(&year.Year); err != nil {
			return nil, err
		}
		items = append(items, year)
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	return items, nil
}

var siteDataEntriesSql = `select
si.SITE_ID,si.YEAR,si.FIELDOFFICE,si.PROJECT_ID,si.SEGMENT_ID,si.SEASON,si.BEND,si.BENDRN,si.SITE_FID,si.UPLOADED_BY,si.LAST_EDIT_COMMENT,si.EDIT_INITIALS,si.complete,
si.approved,si.UPLOAD_FILENAME,si.UPLOAD_SESSION_ID,si.SAMPLE_UNIT_TYPE,si.brm_id,fnc.bkg_color,fnc.bend_river_mile
from ds_sites si inner join table (pallid_data_entry_api.data_entry_site_fnc(:2,:3,:4,:5,:6,:7)) fnc on si.site_id = fnc.site_id`

var siteDataEntriesCountSql = `SELECT count(*) from ds_sites si inner join table (pallid_data_entry_api.data_entry_site_fnc(:2,:3,:4,:5,:6,:7)) fnc on si.site_id = fnc.site_id`

var siteDataEntriesBySiteIdSql = `select si.SITE_ID,si.YEAR,si.FIELDOFFICE,si.PROJECT_ID,si.SEGMENT_ID,si.SEASON,si.BEND,si.BENDRN,si.SITE_FID,si.UPLOADED_BY,si.LAST_EDIT_COMMENT,si.EDIT_INITIALS,si.complete,
si.approved,si.UPLOAD_FILENAME,si.UPLOAD_SESSION_ID,si.SAMPLE_UNIT_TYPE,si.brm_id,fnc.bkg_color,fnc.bend_river_mile
from ds_sites si inner join table (pallid_data_entry_api.data_entry_site_fnc(:2,:3,:4,:5,:6,:7)) fnc on si.site_id = fnc.site_id where site_id=:1`

var siteDataEntriesCountBySiteIdSql = `SELECT count(*) from ds_sites si inner join table (pallid_data_entry_api.data_entry_site_fnc(:2,:3,:4,:5,:6,:7)) fnc on si.site_id = fnc.site_id where site_id=:1`

// GetSiteDataEntries returns one page of site data-entry rows plus the
// total matching row count. A non-empty siteId restricts the result to
// that single site; the remaining filters are forwarded to the
// data_entry_site_fnc table function.
// NOTE(review): the placeholders are numbered :2..:7 while arguments are
// supplied positionally (year, office, project, bend, season, segment) —
// presumably the driver binds by position; confirm against the driver docs.
func (s *PallidSturgeonStore) GetSiteDataEntries(siteId string, year string, officeCode string, project string, segment string, season string, bend string, queryParams models.SearchParams) (models.SitesWithCount, error) {
	siteDataEntryWithCount := models.SitesWithCount{}

	// Default to the unfiltered queries; narrow to a single site on demand.
	query := siteDataEntriesSql
	queryWithCount := siteDataEntriesCountSql
	id := ""
	if siteId != "" {
		query = siteDataEntriesBySiteIdSql
		queryWithCount = siteDataEntriesCountBySiteIdSql
		id = siteId
	}

	countQuery, err := s.db.Prepare(queryWithCount)
	if err != nil {
		return siteDataEntryWithCount, err
	}
	// Prepared statements hold a server-side cursor; close or they leak.
	defer countQuery.Close()

	var countrows *sql.Rows
	if id == "" {
		countrows, err = countQuery.Query(year, officeCode, project, bend, season, segment)
	} else {
		countrows, err = countQuery.Query(year, officeCode, project, bend, season, segment, id)
	}
	if err != nil {
		return siteDataEntryWithCount, err
	}
	defer countrows.Close()

	for countrows.Next() {
		if err = countrows.Scan(&siteDataEntryWithCount.TotalCount); err != nil {
			return siteDataEntryWithCount, err
		}
	}
	if err = countrows.Err(); err != nil {
		return siteDataEntryWithCount, err
	}

	siteEntries := []models.Sites{}
	offset := queryParams.PageSize * queryParams.Page
	if queryParams.OrderBy == "" {
		queryParams.OrderBy = "site_id desc"
	}
	// NOTE(review): OrderBy is spliced into the SQL text, so it must be a
	// vetted column expression, never raw user input — confirm at the caller.
	siteDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize))
	dbQuery, err := s.db.Prepare(siteDataEntriesSqlWithSearch)
	if err != nil {
		return siteDataEntryWithCount, err
	}
	defer dbQuery.Close()

	var rows *sql.Rows
	if id == "" {
		rows, err = dbQuery.Query(year, officeCode, project, bend, season, segment)
	} else {
		rows, err = dbQuery.Query(year, officeCode, project, bend, season, segment, id)
	}
	if err != nil {
		return siteDataEntryWithCount, err
	}
	defer rows.Close()

	for rows.Next() {
		siteDataEntry := models.Sites{}
		err = rows.Scan(&siteDataEntry.SiteID, &siteDataEntry.Year, &siteDataEntry.FieldofficeId, &siteDataEntry.ProjectId, &siteDataEntry.SegmentId, &siteDataEntry.SeasonId, &siteDataEntry.Bend, &siteDataEntry.Bendrn, &siteDataEntry.SiteFID,
			&siteDataEntry.UploadedBy, &siteDataEntry.LastEditComment, &siteDataEntry.EditInitials, &siteDataEntry.Complete, &siteDataEntry.Approved, &siteDataEntry.UploadFilename, &siteDataEntry.UploadSessionId,
			&siteDataEntry.SampleUnitTypeCode, &siteDataEntry.BrmID, &siteDataEntry.BkgColor, &siteDataEntry.BendRiverMile)
		if err != nil {
			return siteDataEntryWithCount, err
		}
		siteEntries = append(siteEntries, siteDataEntry)
	}
	if err = rows.Err(); err != nil {
		return siteDataEntryWithCount, err
	}

	siteDataEntryWithCount.Items = siteEntries
	return siteDataEntryWithCount, nil
}

var insertSiteDataSql = `insert into ds_sites (brm_id, site_fid, year, FIELDOFFICE, PROJECT_ID,
	SEGMENT_ID, SEASON, SAMPLE_UNIT_TYPE, bend, BENDRN, edit_initials, last_updated, last_edit_comment, uploaded_by)
	values ((CASE
		when :14 = 'B' THEN (select brm_id from bend_river_mile_lk where bend_num = :15 and b_segment = :16)
		when :17 = 'C' THEN (select chute_id from chute_lk where chute_code = :18 and segment_id = :19)
		when :20 = 'R' THEN (select reach_id from reach_lk where reach_code = :21 and segment_id = :22)
		ELSE 0
	END),:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13) returning site_id into :23`

// SaveSiteDataEntry inserts a new ds_sites row and returns the generated
// site_id. The (sampleUnitType, code, segmentCode) triple is passed three
// times because the CASE in insertSiteDataSql resolves brm_id against a
// different lookup table per sample-unit type ('B', 'C' or 'R').
func (s *PallidSturgeonStore) SaveSiteDataEntry(code string, sampleUnitType string, segmentCode string, sitehDataEntry models.Sites) (int, error) {
	var id int
	_, err := s.db.Exec(insertSiteDataSql, sampleUnitType, code, segmentCode, sampleUnitType, code, segmentCode, sampleUnitType, code, segmentCode, sitehDataEntry.SiteFID, sitehDataEntry.Year, sitehDataEntry.FieldofficeId, sitehDataEntry.ProjectId,
		sitehDataEntry.SegmentId, sitehDataEntry.SeasonId, sitehDataEntry.SampleUnitTypeCode, sitehDataEntry.Bend, sitehDataEntry.Bendrn, sitehDataEntry.EditInitials, sitehDataEntry.LastUpdated,
		sitehDataEntry.LastEditComment, sitehDataEntry.UploadedBy, sql.Out{Dest: &id})
	return id, err
}

var updateSiteDataSql = `UPDATE ds_sites
SET site_fid = :2,
	year = :3,
	FIELDOFFICE = :4,
	PROJECT_ID = :5,
	SEGMENT_ID = :6,
	SEASON = :7,
	SAMPLE_UNIT_TYPE = :8,
	BENDRN = :9,
	edit_initials = :10,
	last_updated = :11,
	uploaded_by = :12,
	brm_id = :13,
	last_edit_comment = :14
WHERE site_id = :1`

// UpdateSiteDataEntry rewrites the mutable columns of an existing ds_sites
// row, keyed by SiteID (bound last to match the :1 placeholder position).
func (s *PallidSturgeonStore) UpdateSiteDataEntry(sitehDataEntry models.Sites) error {
	_, err := s.db.Exec(updateSiteDataSql, sitehDataEntry.SiteFID, sitehDataEntry.Year, sitehDataEntry.FieldofficeId, sitehDataEntry.ProjectId, sitehDataEntry.SegmentId, sitehDataEntry.SeasonId, sitehDataEntry.SampleUnitTypeCode,
		sitehDataEntry.Bendrn, sitehDataEntry.EditInitials, sitehDataEntry.LastUpdated, sitehDataEntry.UploadedBy, sitehDataEntry.BrmID, sitehDataEntry.LastEditComment, sitehDataEntry.SiteID)
	return err
}

var fishDataEntriesSql = `select fi.f_id, fi.f_fid, fi.mr_id, si.site_id, fi.panelhook,fi.bait,fi.species, fi.length, fi.weight, fi.fishcount, fi.otolith, fi.rayspine, fi.scale, fi.ftprefix, fi.ftnum, fi.ftmr, fi.edit_initials,
fi.last_edit_comment, fi.uploaded_by, fi.genetics_vial_number, fi.condition, fi.fin_curl from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

var fishDataEntriesCountSql = `select count(*) from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

var fishDataEntriesByFidSql = `select fi.f_id, fi.f_fid, fi.mr_id, si.site_id, fi.panelhook,fi.bait,fi.species, fi.length, fi.weight, fi.fishcount, fi.otolith, fi.rayspine, fi.scale, fi.ftprefix, fi.ftnum, fi.ftmr, fi.edit_initials,
fi.last_edit_comment, fi.uploaded_by, fi.genetics_vial_number, fi.condition, fi.fin_curl from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.f_id = :1`

var fishDataEntriesCountByFidSql = `select count(*) from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.f_id = :1`

var fishDataEntriesByFfidSql = `select fi.f_id, fi.f_fid, fi.mr_id, si.site_id, fi.panelhook,fi.bait,fi.species, fi.length, fi.weight, fi.fishcount, fi.otolith, fi.rayspine, fi.scale, fi.ftprefix, fi.ftnum, fi.ftmr, fi.edit_initials,
fi.last_edit_comment, fi.uploaded_by, fi.genetics_vial_number, fi.condition, fi.fin_curl from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.f_fid = :1`

var fishDataEntriesCountByFfidSql = `select count(*) from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.f_fid = :1`

var fishDataEntriesByMridSql = `select fi.f_id, fi.f_fid, fi.mr_id, si.site_id, fi.panelhook,fi.bait,fi.species, fi.length, fi.weight, fi.fishcount, fi.otolith, fi.rayspine, fi.scale, fi.ftprefix, fi.ftnum, fi.ftmr, fi.edit_initials,
fi.last_edit_comment, fi.uploaded_by, fi.genetics_vial_number, fi.condition, fi.fin_curl from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.mr_id = :1`

var fishDataEntriesCountByMridSql = `select count(*) from ds_fish fi inner join ds_moriver mo on fi.mr_id = mo.mr_id inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and fi.mr_id = :1`

// GetFishDataEntries returns fish data-entry rows and the total count,
// restricted by field office ('ZZ' acts as a wildcard in the SQL CASE).
// At most one id filter applies; precedence when several are non-empty is
// mrId over fieldId over tableId, matching the original selection order.
func (s *PallidSturgeonStore) GetFishDataEntries(tableId string, fieldId string, mrId string, officeCode string, queryParams models.SearchParams) (models.FishDataEntryWithCount, error) {
	fishDataEntryWithCount := models.FishDataEntryWithCount{}

	var query, queryWithCount, id string
	switch {
	case mrId != "":
		query = fishDataEntriesByMridSql
		queryWithCount = fishDataEntriesCountByMridSql
		id = mrId
	case fieldId != "":
		query = fishDataEntriesByFfidSql
		queryWithCount = fishDataEntriesCountByFfidSql
		id = fieldId
	case tableId != "":
		query = fishDataEntriesByFidSql
		queryWithCount = fishDataEntriesCountByFidSql
		id = tableId
	default:
		query = fishDataEntriesSql
		queryWithCount = fishDataEntriesCountSql
	}

	countQuery, err := s.db.Prepare(queryWithCount)
	if err != nil {
		return fishDataEntryWithCount, err
	}
	// Close prepared statements to release their server-side cursors.
	defer countQuery.Close()

	var countrows *sql.Rows
	if id == "" {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode)
	} else {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return fishDataEntryWithCount, err
	}
	defer countrows.Close()

	for countrows.Next() {
		if err = countrows.Scan(&fishDataEntryWithCount.TotalCount); err != nil {
			return fishDataEntryWithCount, err
		}
	}
	if err = countrows.Err(); err != nil {
		return fishDataEntryWithCount, err
	}

	fishEntries := []models.UploadFish{}
	if queryParams.OrderBy == "" {
		queryParams.OrderBy = "f_id asc"
	}
	// NOTE(review): OrderBy is spliced into the SQL; it must be a vetted
	// column expression, never raw user input.
	fishDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s", queryParams.OrderBy)
	dbQuery, err := s.db.Prepare(fishDataEntriesSqlWithSearch)
	if err != nil {
		return fishDataEntryWithCount, err
	}
	defer dbQuery.Close()

	var rows *sql.Rows
	if id == "" {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode)
	} else {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return fishDataEntryWithCount, err
	}
	defer rows.Close()

	for rows.Next() {
		fishDataEntry := models.UploadFish{}
		err = rows.Scan(&fishDataEntry.Fid, &fishDataEntry.Ffid, &fishDataEntry.MrID, &fishDataEntry.SiteID, &fishDataEntry.Panelhook, &fishDataEntry.Bait, &fishDataEntry.Species, &fishDataEntry.Length, &fishDataEntry.Weight, &fishDataEntry.Fishcount, &fishDataEntry.Otolith, &fishDataEntry.Rayspine,
			&fishDataEntry.Scale, &fishDataEntry.Ftprefix, &fishDataEntry.Ftnum, &fishDataEntry.Ftmr, &fishDataEntry.EditInitials, &fishDataEntry.LastEditComment, &fishDataEntry.UploadedBy, &fishDataEntry.GeneticsVialNumber, &fishDataEntry.Condition, &fishDataEntry.FinCurl)
		if err != nil {
			return fishDataEntryWithCount, err
		}
		fishEntries = append(fishEntries, fishDataEntry)
	}
	if err = rows.Err(); err != nil {
		return fishDataEntryWithCount, err
	}

	fishDataEntryWithCount.Items = fishEntries
	return fishDataEntryWithCount, nil
}

var insertFishDataSql = `insert into ds_fish (FIELDOFFICE,PROJECT,SEGMENT,uniqueidentifier,id,panelhook,bait,SPECIES,length,weight,FISHCOUNT,otolith,rayspine,scale,FTPREFIX,FTNUM,FTMR,mr_id,edit_initials,last_edit_comment,
last_updated, uploaded_by, genetics_vial_number, condition, fin_curl)
values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24,:25) returning f_id into :26`

// SaveFishDataEntry inserts a new ds_fish row and returns the generated
// f_id via the RETURNING ... INTO out-parameter.
func (s *PallidSturgeonStore) SaveFishDataEntry(fishDataEntry models.UploadFish) (int, error) {
	var id int
	_, err := s.db.Exec(insertFishDataSql, fishDataEntry.Fieldoffice, fishDataEntry.Project, fishDataEntry.Segment, fishDataEntry.UniqueID, fishDataEntry.Id, fishDataEntry.Panelhook,
		fishDataEntry.Bait, fishDataEntry.Species, fishDataEntry.Length, fishDataEntry.Weight, fishDataEntry.Fishcount, fishDataEntry.Otolith, fishDataEntry.Rayspine,
		fishDataEntry.Scale, fishDataEntry.Ftprefix, fishDataEntry.Ftnum, fishDataEntry.Ftmr, fishDataEntry.MrID, fishDataEntry.EditInitials, fishDataEntry.LastEditComment, fishDataEntry.LastUpdated, fishDataEntry.UploadedBy,
		fishDataEntry.GeneticsVialNumber, fishDataEntry.Condition, fishDataEntry.FinCurl, sql.Out{Dest: &id})
	return id, err
}

var updateFishDataSql = `UPDATE ds_fish SET
FIELDOFFICE = :2,
PROJECT = :3,
SEGMENT = :4,
uniqueidentifier = :5,
id = :6,
panelhook = :7,
bait = :8,
SPECIES = :9,
length = :10,
weight = :11,
FISHCOUNT = :12,
otolith = :13,
rayspine = :14,
scale = :15,
FTPREFIX = :16,
FTNUM = :17,
FTMR = :18,
edit_initials = :19,
last_edit_comment = :20,
last_updated = :21,
uploaded_by = :22,
genetics_vial_number = :23,
condition = :24,
fin_curl = :25
WHERE f_id = :1`
+func (s *PallidSturgeonStore) UpdateFishDataEntry(fishDataEntry models.UploadFish) error { + _, err := s.db.Exec(updateFishDataSql, fishDataEntry.Fieldoffice, fishDataEntry.Project, fishDataEntry.Segment, fishDataEntry.UniqueID, fishDataEntry.Id, fishDataEntry.Panelhook, + fishDataEntry.Bait, fishDataEntry.Species, fishDataEntry.Length, fishDataEntry.Weight, fishDataEntry.Fishcount, fishDataEntry.Otolith, fishDataEntry.Rayspine, + fishDataEntry.Scale, fishDataEntry.Ftprefix, fishDataEntry.Ftnum, fishDataEntry.Ftmr, fishDataEntry.EditInitials, fishDataEntry.LastEditComment, fishDataEntry.LastUpdated, fishDataEntry.UploadedBy, + fishDataEntry.GeneticsVialNumber, fishDataEntry.Condition, fishDataEntry.FinCurl, fishDataEntry.Fid) + return err +} + +func (s *PallidSturgeonStore) DeleteFishDataEntry(id string) error { + _, err := s.db.Exec("delete from ds_fish where f_id = :1", id) + return err +} + +var insertMoriverDataSql = `insert into ds_moriver(mr_fid,site_id,FIELDOFFICE,PROJECT,SEGMENT,SEASON,setdate, subsample, subsamplepass, subsamplen, recorder, + gear, GEAR_TYPE, temp, turbidity, conductivity, do, distance, width, netrivermile, structurenumber, usgs, riverstage, discharge, + u1, u2, u3, u4, u5, u6, u7, MACRO, MESO, habitatrn, qc, micro_structure, structure_flow, structure_mod, set_site_1, set_site_2, + set_site_3, starttime, startlatitude, startlongitude, stoptime, stoplatitude, stoplongitude, depth1, velocitybot1, velocity08_1, + velocity02or06_1, depth2, velocitybot2, velocity08_2, velocity02or06_2, depth3, velocitybot3, velocity08_3, velocity02or06_3, + watervel, cobble, ORGANIC, silt, sand, gravel, comments, complete, checkby, no_turbidity, no_velocity, edit_initials,last_edit_comment, + last_updated, uploaded_by, bend, bendrn, bendrivermile) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20, + :21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35,:36,:37,:38,:39,:40,:41,:42,:43,:44,:45,:46,:47,:48,:49,:50, + 
:51,:52,:53,:54,:55,:56,:57,:58,:59,:60,:61,:62,:63,:64,:65,:66,:67,:68,:69,:70,:71,:72,:73,:74,:75,:76,:77) returning mr_id into :78` + +func (s *PallidSturgeonStore) SaveMoriverDataEntry(moriverDataEntry models.UploadMoriver) (int, error) { + var id int + _, err := s.db.Exec(insertMoriverDataSql, moriverDataEntry.MrFid, moriverDataEntry.SiteID, moriverDataEntry.FieldOffice, + moriverDataEntry.Project, moriverDataEntry.Segment, moriverDataEntry.Season, moriverDataEntry.SetDate, moriverDataEntry.Subsample, moriverDataEntry.Subsamplepass, + moriverDataEntry.Subsamplen, moriverDataEntry.Recorder, moriverDataEntry.Gear, moriverDataEntry.GearType, moriverDataEntry.Temp, moriverDataEntry.Turbidity, moriverDataEntry.Conductivity, moriverDataEntry.Do, + moriverDataEntry.Distance, moriverDataEntry.Width, moriverDataEntry.Netrivermile, moriverDataEntry.Structurenumber, moriverDataEntry.Usgs, moriverDataEntry.Riverstage, moriverDataEntry.Discharge, + moriverDataEntry.U1, moriverDataEntry.U2, moriverDataEntry.U3, moriverDataEntry.U4, moriverDataEntry.U5, moriverDataEntry.U6, moriverDataEntry.U7, moriverDataEntry.Macro, moriverDataEntry.Meso, moriverDataEntry.Habitatrn, moriverDataEntry.Qc, + moriverDataEntry.MicroStructure, moriverDataEntry.StructureFlow, moriverDataEntry.StructureMod, moriverDataEntry.SetSite1, moriverDataEntry.SetSite2, moriverDataEntry.SetSite3, + moriverDataEntry.StartTime, moriverDataEntry.StartLatitude, moriverDataEntry.StartLongitude, moriverDataEntry.StopTime, moriverDataEntry.StopLatitude, moriverDataEntry.StopLongitude, + moriverDataEntry.Depth1, moriverDataEntry.Velocitybot1, moriverDataEntry.Velocity08_1, moriverDataEntry.Velocity02or06_1, + moriverDataEntry.Depth2, moriverDataEntry.Velocitybot2, moriverDataEntry.Velocity08_2, moriverDataEntry.Velocity02or06_2, + moriverDataEntry.Depth3, moriverDataEntry.Velocitybot3, moriverDataEntry.Velocity08_3, moriverDataEntry.Velocity02or06_3, + moriverDataEntry.Watervel, moriverDataEntry.Cobble, 
moriverDataEntry.Organic, moriverDataEntry.Silt, moriverDataEntry.Sand, moriverDataEntry.Gravel, + moriverDataEntry.Comments, moriverDataEntry.Complete, moriverDataEntry.Checkby, moriverDataEntry.NoTurbidity, moriverDataEntry.NoVelocity, moriverDataEntry.EditInitials, moriverDataEntry.LastEditComment, moriverDataEntry.LastUpdated, moriverDataEntry.UploadedBy, + moriverDataEntry.Bend, moriverDataEntry.BendRn, moriverDataEntry.BendRiverMile, sql.Out{Dest: &id}) + return id, err +} + +var updateMoriverDataSql = `UPDATE ds_moriver +SET project = :2,segment = :3,SEASON = :4,setdate = :5, subsample = :6, subsamplepass = :7, subsamplen = :8, recorder = :9, gear = :10, +GEAR_TYPE = :11, temp = :12, turbidity = :13, conductivity = :14, do = :15, distance = :16, width = :17, netrivermile = :18, structurenumber = :19, +usgs = :20, riverstage = :21, discharge = :22, u1 = :23, u2 = :24, u3 = :25, u4 = :26, u5 = :27, u6 = :28, u7 = :29, MACRO = :30, MESO = :31, +habitatrn = :32, qc = :33, micro_structure = :34, structure_flow = :35, structure_mod = :36, set_site_1 = :37, set_site_2 = :38, set_site_3 = :39, +starttime = :40, startlatitude = :41, startlongitude = :42, stoptime = :43, stoplatitude = :44, stoplongitude = :45, +depth1 = :46, velocitybot1 = :47, velocity08_1 = :48, velocity02or06_1 = :49, +depth2 = :50, velocitybot2 = :51, velocity08_2 = :52, velocity02or06_2 = :53, +depth3 = :54, velocitybot3 = :55, velocity08_3 = :56, velocity02or06_3 = :57, +watervel = :58, cobble = :59, ORGANIC = :60, silt = :61, sand = :62, gravel = :63, comments = :64, complete = :65, checkby = :66, +no_turbidity = :67, no_velocity = :68, edit_initials = :69, mr_fid= :70, site_id = :71, FIELDOFFICE = :72, last_edit_comment = :73, last_updated = :74, +uploaded_by = :75, bend = :76, bendrn = :77, bendrivermile =:78 WHERE mr_id = :1` + +func (s *PallidSturgeonStore) UpdateMoriverDataEntry(moriverDataEntry models.UploadMoriver) error { + _, err := s.db.Exec(updateMoriverDataSql, + 
moriverDataEntry.Project, moriverDataEntry.Segment, moriverDataEntry.Season, moriverDataEntry.SetDate, moriverDataEntry.Subsample, moriverDataEntry.Subsamplepass, + moriverDataEntry.Subsamplen, moriverDataEntry.Recorder, moriverDataEntry.Gear, moriverDataEntry.GearType, moriverDataEntry.Temp, moriverDataEntry.Turbidity, moriverDataEntry.Conductivity, moriverDataEntry.Do, + moriverDataEntry.Distance, moriverDataEntry.Width, moriverDataEntry.Netrivermile, moriverDataEntry.Structurenumber, moriverDataEntry.Usgs, moriverDataEntry.Riverstage, moriverDataEntry.Discharge, + moriverDataEntry.U1, moriverDataEntry.U2, moriverDataEntry.U3, moriverDataEntry.U4, moriverDataEntry.U5, moriverDataEntry.U6, moriverDataEntry.U7, moriverDataEntry.Macro, moriverDataEntry.Meso, moriverDataEntry.Habitatrn, moriverDataEntry.Qc, + moriverDataEntry.MicroStructure, moriverDataEntry.StructureFlow, moriverDataEntry.StructureMod, moriverDataEntry.SetSite1, moriverDataEntry.SetSite2, moriverDataEntry.SetSite3, + moriverDataEntry.StartTime, moriverDataEntry.StartLatitude, moriverDataEntry.StartLongitude, moriverDataEntry.StopTime, moriverDataEntry.StopLatitude, moriverDataEntry.StopLongitude, + moriverDataEntry.Depth1, moriverDataEntry.Velocitybot1, moriverDataEntry.Velocity08_1, moriverDataEntry.Velocity02or06_1, + moriverDataEntry.Depth2, moriverDataEntry.Velocitybot2, moriverDataEntry.Velocity08_2, moriverDataEntry.Velocity02or06_2, + moriverDataEntry.Depth3, moriverDataEntry.Velocitybot3, moriverDataEntry.Velocity08_3, moriverDataEntry.Velocity02or06_3, + moriverDataEntry.Watervel, moriverDataEntry.Cobble, moriverDataEntry.Organic, moriverDataEntry.Silt, moriverDataEntry.Sand, moriverDataEntry.Gravel, + moriverDataEntry.Comments, moriverDataEntry.Complete, moriverDataEntry.Checkby, moriverDataEntry.NoTurbidity, moriverDataEntry.NoVelocity, moriverDataEntry.EditInitials, moriverDataEntry.MrFid, moriverDataEntry.SiteID, moriverDataEntry.FieldOffice, + moriverDataEntry.LastEditComment, 
moriverDataEntry.LastUpdated, moriverDataEntry.UploadedBy, moriverDataEntry.Bend, moriverDataEntry.BendRn, moriverDataEntry.BendRiverMile, moriverDataEntry.MrID) + return err +} + +var moriverDataEntriesByFidSql = `select mr_fid,mr_id,site_id,FIELDOFFICE,PROJECT,SEGMENT,SEASON,setdate, subsample, subsamplepass, +subsamplen, recorder, gear, GEAR_TYPE, temp, turbidity, conductivity, do, distance, width, netrivermile, structurenumber, usgs, riverstage, discharge, +u1, u2, u3, u4, u5, u6, u7, MACRO, MESO, habitatrn, qc, micro_structure, structure_flow, structure_mod, set_site_1, set_site_2, set_site_3, +starttime, startlatitude, startlongitude, stoptime, stoplatitude, stoplongitude, depth1, velocitybot1, velocity08_1, velocity02or06_1, +depth2, velocitybot2, velocity08_2, velocity02or06_2,depth3, velocitybot3, velocity08_3, velocity02or06_3, watervel, cobble, ORGANIC, silt, sand, +gravel, comments, complete, checkby, no_turbidity, no_velocity, edit_initials,last_edit_comment, uploaded_by from ds_moriver +where mr_id = :1` + +var moriverDataEntriesCountByFidSql = `SELECT count(*) FROM ds_moriver where mr_id = :1` + +var moriverDataEntriesByFfidSql = `select mr_fid,mr_id,site_id,FIELDOFFICE,PROJECT,SEGMENT,SEASON,setdate, subsample, subsamplepass, +subsamplen, recorder, gear, GEAR_TYPE, temp, turbidity, conductivity, do, distance, width, netrivermile, structurenumber, usgs, riverstage, discharge, +u1, u2, u3, u4, u5, u6, u7, MACRO, MESO, habitatrn, qc, micro_structure, structure_flow, structure_mod, set_site_1, set_site_2, set_site_3, +starttime, startlatitude, startlongitude, stoptime, stoplatitude, stoplongitude, depth1, velocitybot1, velocity08_1, velocity02or06_1, +depth2, velocitybot2, velocity08_2, velocity02or06_2,depth3, velocitybot3, velocity08_3, velocity02or06_3, watervel, cobble, ORGANIC, silt, sand, +gravel, comments, complete, checkby, no_turbidity, no_velocity, edit_initials,last_edit_comment, uploaded_by from ds_moriver +where mr_fid = :1` + +var 
moriverDataEntriesCountByFfidSql = `SELECT count(*) FROM ds_moriver where mr_fid = :1` + +func (s *PallidSturgeonStore) GetMoriverDataEntries(tableId string, fieldId string, fieldOfficeCode string, queryParams models.SearchParams) (models.MoriverDataEntryWithCount, error) { + moriverDataEntryWithCount := models.MoriverDataEntryWithCount{} + query := "" + queryWithCount := "" + id := "" + + if tableId != "" { + query = moriverDataEntriesByFidSql + queryWithCount = moriverDataEntriesCountByFidSql + id = tableId + } + + if fieldId != "" { + query = moriverDataEntriesByFfidSql + queryWithCount = moriverDataEntriesCountByFfidSql + id = fieldId + } + + countQuery, err := s.db.Prepare(queryWithCount) + if err != nil { + return moriverDataEntryWithCount, err + } + + countrows, err := countQuery.Query(id) + if err != nil { + return moriverDataEntryWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&moriverDataEntryWithCount.TotalCount) + if err != nil { + return moriverDataEntryWithCount, err + } + } + + moriverEntries := []models.UploadMoriver{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id desc" + } + moriverDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(moriverDataEntriesSqlWithSearch) + if err != nil { + return moriverDataEntryWithCount, err + } + + rows, err := dbQuery.Query(id) + if err != nil { + return moriverDataEntryWithCount, err + } + defer rows.Close() + + for rows.Next() { + moriverDataEntry := models.UploadMoriver{} + err = rows.Scan(&moriverDataEntry.MrFid, &moriverDataEntry.MrID, &moriverDataEntry.SiteID, &moriverDataEntry.FieldOffice, + &moriverDataEntry.Project, &moriverDataEntry.Segment, &moriverDataEntry.Season, &moriverDataEntry.SetDate, &moriverDataEntry.Subsample, 
&moriverDataEntry.Subsamplepass, + &moriverDataEntry.Subsamplen, &moriverDataEntry.Recorder, &moriverDataEntry.Gear, &moriverDataEntry.GearType, &moriverDataEntry.Temp, &moriverDataEntry.Turbidity, &moriverDataEntry.Conductivity, &moriverDataEntry.Do, + &moriverDataEntry.Distance, &moriverDataEntry.Width, &moriverDataEntry.Netrivermile, &moriverDataEntry.Structurenumber, &moriverDataEntry.Usgs, &moriverDataEntry.Riverstage, &moriverDataEntry.Discharge, + &moriverDataEntry.U1, &moriverDataEntry.U2, &moriverDataEntry.U3, &moriverDataEntry.U4, &moriverDataEntry.U5, &moriverDataEntry.U6, &moriverDataEntry.U7, &moriverDataEntry.Macro, &moriverDataEntry.Meso, &moriverDataEntry.Habitatrn, &moriverDataEntry.Qc, + &moriverDataEntry.MicroStructure, &moriverDataEntry.StructureFlow, &moriverDataEntry.StructureMod, &moriverDataEntry.SetSite1, &moriverDataEntry.SetSite2, &moriverDataEntry.SetSite3, + &moriverDataEntry.StartTime, &moriverDataEntry.StartLatitude, &moriverDataEntry.StartLongitude, &moriverDataEntry.StopTime, &moriverDataEntry.StopLatitude, &moriverDataEntry.StopLongitude, + &moriverDataEntry.Depth1, &moriverDataEntry.Velocitybot1, &moriverDataEntry.Velocity08_1, &moriverDataEntry.Velocity02or06_1, + &moriverDataEntry.Depth2, &moriverDataEntry.Velocitybot2, &moriverDataEntry.Velocity08_2, &moriverDataEntry.Velocity02or06_2, + &moriverDataEntry.Depth3, &moriverDataEntry.Velocitybot3, &moriverDataEntry.Velocity08_3, &moriverDataEntry.Velocity02or06_3, + &moriverDataEntry.Watervel, &moriverDataEntry.Cobble, &moriverDataEntry.Organic, &moriverDataEntry.Silt, &moriverDataEntry.Sand, &moriverDataEntry.Gravel, + &moriverDataEntry.Comments, &moriverDataEntry.Complete, &moriverDataEntry.Checkby, &moriverDataEntry.NoTurbidity, &moriverDataEntry.NoVelocity, &moriverDataEntry.EditInitials, &moriverDataEntry.LastEditComment, &moriverDataEntry.UploadedBy) + if err != nil { + return moriverDataEntryWithCount, err + } + moriverEntries = append(moriverEntries, moriverDataEntry) + } 
	moriverDataEntryWithCount.Items = moriverEntries

	return moriverDataEntryWithCount, err
}

// insertSupplementalDataSql inserts one ds_supplemental row. Binds :1-:48 are
// positional and must match the column list order exactly; :49 is an output
// bind that receives the generated s_id via Oracle's RETURNING ... INTO.
var insertSupplementalDataSql = `insert into ds_supplemental(f_id, f_fid, mr_id,TAGNUMBER, PITRN,SCUTELOC, SCUTENUM, SCUTELOC2, SCUTENUM2,ELHV, ELCOLOR, ERHV, ERCOLOR, CWTYN, DANGLER, genetic, genetics_vial_number,
BROODSTOCK, HATCH_WILD, species_id,head, snouttomouth, inter, mouthwidth, m_ib,l_ob, l_ib, r_ib,r_ob, anal, dorsal, status, HATCHERY_ORIGIN,SEX, stage, recapture, photo,genetic_needs, other_tag_info,comments,
edit_initials,last_edit_comment, last_updated, uploaded_by, complete, approved, checkby, recorder)
values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35,:36,:37,:38,:39,:40,:41,:42,:43,:44,:45,:46,:47,:48) returning s_id into :49`

// SaveSupplementalDataEntry inserts a new supplemental record and returns the
// database-generated s_id. The argument order below must stay in lock-step
// with the :1-:48 bind positions of insertSupplementalDataSql.
func (s *PallidSturgeonStore) SaveSupplementalDataEntry(supplementalDataEntry models.UploadSupplemental) (int, error) {
	var id int
	_, err := s.db.Exec(insertSupplementalDataSql,
		supplementalDataEntry.Fid,
		supplementalDataEntry.FFid,
		supplementalDataEntry.MrId,
		supplementalDataEntry.Tagnumber,
		supplementalDataEntry.Pitrn,
		supplementalDataEntry.Scuteloc,
		supplementalDataEntry.Scutenum,
		supplementalDataEntry.Scuteloc2,
		supplementalDataEntry.Scutenum2,
		supplementalDataEntry.Elhv,
		supplementalDataEntry.Elcolor,
		supplementalDataEntry.Erhv,
		supplementalDataEntry.Ercolor,
		supplementalDataEntry.Cwtyn,
		supplementalDataEntry.Dangler,
		supplementalDataEntry.Genetic,
		supplementalDataEntry.GeneticsVialNumber,
		supplementalDataEntry.Broodstock,
		supplementalDataEntry.HatchWild,
		supplementalDataEntry.SpeciesId,
		supplementalDataEntry.Head,
		supplementalDataEntry.Snouttomouth,
		supplementalDataEntry.Inter,
		supplementalDataEntry.Mouthwidth,
		supplementalDataEntry.MIb,
		supplementalDataEntry.LOb,
		supplementalDataEntry.LIb,
		supplementalDataEntry.RIb,
		supplementalDataEntry.ROb,
		supplementalDataEntry.Anal,
		supplementalDataEntry.Dorsal,
		supplementalDataEntry.Status,
		supplementalDataEntry.HatcheryOrigin,
		supplementalDataEntry.Sex,
		supplementalDataEntry.Stage,
		supplementalDataEntry.Recapture,
		supplementalDataEntry.Photo,
		supplementalDataEntry.GeneticNeeds,
		supplementalDataEntry.OtherTagInfo,
		supplementalDataEntry.Comments,
		supplementalDataEntry.EditInitials,
		supplementalDataEntry.LastEditComment,
		supplementalDataEntry.LastUpdated,
		supplementalDataEntry.UploadedBy,
		supplementalDataEntry.Complete,
		supplementalDataEntry.Approved,
		supplementalDataEntry.Checkby,
		supplementalDataEntry.Recorder,
		// Output bind :49 - receives the s_id generated by the insert.
		sql.Out{Dest: &id},
	)
	return id, err
}

// updateSupplementalDataSql updates one ds_supplemental row. The key bind
// (s_id, :1) appears in the WHERE clause but is passed LAST by
// UpdateSupplementalDataEntry, matching the positional argument order used
// by the driver for the :2-:49 SET binds.
var updateSupplementalDataSql = `UPDATE ds_supplemental SET
f_fid = :2,
mr_id = :3,
TAGNUMBER = :4,
PITRN = :5,
SCUTELOC = :6,
SCUTENUM = :7,
SCUTELOC2 = :8,
SCUTENUM2 = :9,
ELHV = :10,
ELCOLOR = :11,
ERHV = :12,
ERCOLOR = :13,
CWTYN = :14,
DANGLER = :15,
genetic = :16,
genetics_vial_number = :17,
BROODSTOCK = :18,
HATCH_WILD = :19,
head = :20,
snouttomouth = :21,
inter = :22,
mouthwidth = :23,
m_ib = :24,
l_ob = :25,
l_ib = :26,
r_ib = :27,
r_ob = :28,
anal = :29,
dorsal = :30,
status = :31,
HATCHERY_ORIGIN = :32,
SEX = :33,
stage = :34,
recapture = :35,
photo = :36,
genetic_needs = :37,
other_tag_info = :38,
comments = :39,
f_id = :40,
edit_initials = :41,
last_edit_comment = :42,
last_updated = :43,
uploaded_by = :44,
species_id = :45,
complete = :46,
approved = :47,
checkby = :48,
recorder = :49
WHERE s_id = :1`

// UpdateSupplementalDataEntry rewrites every editable column of the row
// identified by supplementalDataEntry.Sid. Argument order follows the
// :2-:49 SET binds of updateSupplementalDataSql, with the :1 key (Sid) last.
func (s *PallidSturgeonStore) UpdateSupplementalDataEntry(supplementalDataEntry models.UploadSupplemental) error {
	_, err := s.db.Exec(updateSupplementalDataSql,
		supplementalDataEntry.FFid,
		supplementalDataEntry.MrId,
		supplementalDataEntry.Tagnumber,
		supplementalDataEntry.Pitrn,
		supplementalDataEntry.Scuteloc,
		supplementalDataEntry.Scutenum,
		supplementalDataEntry.Scuteloc2,
		supplementalDataEntry.Scutenum2,
		supplementalDataEntry.Elhv,
		supplementalDataEntry.Elcolor,
		supplementalDataEntry.Erhv,
		supplementalDataEntry.Ercolor,
		supplementalDataEntry.Cwtyn,
		supplementalDataEntry.Dangler,
		supplementalDataEntry.Genetic,
		supplementalDataEntry.GeneticsVialNumber,
		supplementalDataEntry.Broodstock,
		supplementalDataEntry.HatchWild,
		supplementalDataEntry.Head,
		supplementalDataEntry.Snouttomouth,
		supplementalDataEntry.Inter,
		supplementalDataEntry.Mouthwidth,
		supplementalDataEntry.MIb,
		supplementalDataEntry.LOb,
		supplementalDataEntry.LIb,
		supplementalDataEntry.RIb,
		supplementalDataEntry.ROb,
		supplementalDataEntry.Anal,
		supplementalDataEntry.Dorsal,
		supplementalDataEntry.Status,
		supplementalDataEntry.HatcheryOrigin,
		supplementalDataEntry.Sex,
		supplementalDataEntry.Stage,
		supplementalDataEntry.Recapture,
		supplementalDataEntry.Photo,
		supplementalDataEntry.GeneticNeeds,
		supplementalDataEntry.OtherTagInfo,
		supplementalDataEntry.Comments,
		supplementalDataEntry.Fid,
		supplementalDataEntry.EditInitials,
		supplementalDataEntry.LastEditComment,
		supplementalDataEntry.LastUpdated,
		supplementalDataEntry.UploadedBy,
		supplementalDataEntry.SpeciesId,
		supplementalDataEntry.Complete,
		supplementalDataEntry.Approved,
		supplementalDataEntry.Checkby,
		supplementalDataEntry.Recorder,
		// Key bind :1 - identifies the row to update.
		supplementalDataEntry.Sid)
	return err
}

// supplementalDataEntriesSql lists supplemental rows joined to their moriver,
// site, and fish parents (filter clauses continue below).
var supplementalDataEntriesSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo,
su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

// Count/filter variants of the supplemental listing. Every filtered query
// binds the filter value as :1 and the office code three times (:2-:4); an
// office code of 'ZZ' makes the CASE compare :3='ZZ' and so matches all rows.
var supplementalDataEntriesCountBySql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

var supplementalDataEntriesByFidSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.f_id = :1`

var supplementalDataEntriesCountByFidSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.f_id = :1`

var supplementalDataEntriesByFfidSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.f_fid = :1`

var supplementalDataEntriesCountByFfidSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.f_fid = :1`

var supplementalDataEntriesByGeneticsVialSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.genetics_vial_number = :1`

var supplementalDataEntriesCountByGeneticsVialSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.genetics_vial_number = :1`

var supplementalDataEntriesByGeneticsPitTagSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.TAGNUMBER = :1`

var supplementalDataEntriesCountByPitTagSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.TAGNUMBER = :1`

var supplementalDataEntriesByMrIdSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.mr_id = :1`

var supplementalDataEntriesCountByMrIdSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.mr_id = :1`

var supplementalDataEntriesBySidSql = `select su.s_id, su.f_id, su.f_fid, su.mr_id, si.site_id, mo.netrivermile, fi.length, fi.weight, fi.condition, su.tagnumber, su.pitrn, su.scuteloc, su.scutenum, su.scuteloc2, su.scutenum2, su.elhv, su.elcolor, su.erhv, su.ercolor, su.cwtyn,
su.dangler, su.genetic, su.genetics_vial_number, su.broodstock, su.hatch_wild, su.species_id, fi.species, su.head, su.snouttomouth, su.inter, su.mouthwidth, su.m_ib, su.l_ob, su.l_ib, su.r_ib, su.r_ob, su.anal, su.dorsal, su.status,
su.hatchery_origin, su.sex, su.stage, su.recapture, su.photo, su.genetic_needs, su.other_tag_info, su.comments, su.edit_initials, su.last_edit_comment, su.uploaded_by, su.complete, su.approved, su.checkby, su.recorder from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.s_id = :1`

var supplementalDataEntriesCountBySidSql = `SELECT count(*) from ds_supplemental su
join ds_moriver mo on su.mr_id = mo.mr_id
join ds_sites si on si.site_id = mo.site_id
join ds_fish fi on fi.f_id = su.f_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and su.s_id = :1`

// GetSupplementalDataEntries returns supplemental rows plus their total count
// for one field office ('ZZ' means all offices). At most one optional filter
// is used; when several are non-empty the later assignments win, so the
// effective precedence is mrId > pitTag > geneticsVial > fieldId > fId > tableId.
//
// NOTE(review): unlike GetSearchDataEntries this query applies no
// OFFSET/FETCH pagination even though queryParams carries paging fields -
// confirm whether that is intentional.
func (s *PallidSturgeonStore) GetSupplementalDataEntries(tableId string, fieldId string, geneticsVial string, pitTag string, mrId string, fId string, officeCode string, queryParams models.SearchParams) (models.SupplementalDataEntryWithCount, error) {
	supplementalDataEntryWithCount := models.SupplementalDataEntryWithCount{}
	query := ""
	queryWithCount := ""
	id := ""

	if tableId != "" {
		query = supplementalDataEntriesBySidSql
		queryWithCount = supplementalDataEntriesCountBySidSql
		id = tableId
	}

	if fId != "" {
		query = supplementalDataEntriesByFidSql
		queryWithCount = supplementalDataEntriesCountByFidSql
		id = fId
	}

	if fieldId != "" {
		query = supplementalDataEntriesByFfidSql
		queryWithCount = supplementalDataEntriesCountByFfidSql
		id = fieldId
	}

	if geneticsVial != "" {
		query = supplementalDataEntriesByGeneticsVialSql
		queryWithCount = supplementalDataEntriesCountByGeneticsVialSql
		id = geneticsVial
	}

	if pitTag != "" {
		query = supplementalDataEntriesByGeneticsPitTagSql
		queryWithCount = supplementalDataEntriesCountByPitTagSql
		id = pitTag
	}

	if mrId != "" {
		query = supplementalDataEntriesByMrIdSql
		queryWithCount = supplementalDataEntriesCountByMrIdSql
		id = mrId
	}

	if tableId == "" && fId == "" && fieldId == "" && geneticsVial == "" && pitTag == "" && mrId == "" {
		query = supplementalDataEntriesSql
		queryWithCount = supplementalDataEntriesCountBySql
	}

	countQuery, err := s.db.Prepare(queryWithCount)
	if err != nil {
		return supplementalDataEntryWithCount, err
	}
	// Fix: prepared statements were never closed, leaking server-side cursors.
	defer countQuery.Close()

	var countrows *sql.Rows
	if id == "" {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode)
	} else {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return supplementalDataEntryWithCount, err
	}
	defer countrows.Close()

	for countrows.Next() {
		if err = countrows.Scan(&supplementalDataEntryWithCount.TotalCount); err != nil {
			return supplementalDataEntryWithCount, err
		}
	}
	// Fix: surface any error that terminated row iteration early.
	if err = countrows.Err(); err != nil {
		return supplementalDataEntryWithCount, err
	}

	supplementalEntries := []models.UploadSupplemental{}
	if queryParams.OrderBy == "" {
		queryParams.OrderBy = "s_id"
	}
	// NOTE(review): OrderBy is spliced into the SQL text rather than bound, so
	// it must never be attacker-controlled - validate it against a column
	// whitelist upstream.
	supplementalDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s", queryParams.OrderBy)
	dbQuery, err := s.db.Prepare(supplementalDataEntriesSqlWithSearch)
	if err != nil {
		return supplementalDataEntryWithCount, err
	}
	defer dbQuery.Close()

	var rows *sql.Rows
	if id == "" {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode)
	} else {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return supplementalDataEntryWithCount, err
	}
	defer rows.Close()

	for rows.Next() {
		supplementalDataEntry := models.UploadSupplemental{}
		// Scan order must match the select column order of the *Sql vars above.
		err = rows.Scan(
			&supplementalDataEntry.Sid,
			&supplementalDataEntry.Fid,
			&supplementalDataEntry.FFid,
			&supplementalDataEntry.MrId,
			&supplementalDataEntry.SiteID,
			&supplementalDataEntry.NetRiverMile,
			&supplementalDataEntry.Length,
			&supplementalDataEntry.Weight,
			&supplementalDataEntry.Condition,
			&supplementalDataEntry.Tagnumber,
			&supplementalDataEntry.Pitrn,
			&supplementalDataEntry.Scuteloc,
			&supplementalDataEntry.Scutenum,
			&supplementalDataEntry.Scuteloc2,
			&supplementalDataEntry.Scutenum2,
			&supplementalDataEntry.Elhv,
			&supplementalDataEntry.Elcolor,
			&supplementalDataEntry.Erhv,
			&supplementalDataEntry.Ercolor,
			&supplementalDataEntry.Cwtyn,
			&supplementalDataEntry.Dangler,
			&supplementalDataEntry.Genetic,
			&supplementalDataEntry.GeneticsVialNumber,
			&supplementalDataEntry.Broodstock,
			&supplementalDataEntry.HatchWild,
			&supplementalDataEntry.SpeciesId,
			&supplementalDataEntry.Species,
			&supplementalDataEntry.Head,
			&supplementalDataEntry.Snouttomouth,
			&supplementalDataEntry.Inter,
			&supplementalDataEntry.Mouthwidth,
			&supplementalDataEntry.MIb,
			&supplementalDataEntry.LOb,
			&supplementalDataEntry.LIb,
			&supplementalDataEntry.RIb,
			&supplementalDataEntry.ROb,
			&supplementalDataEntry.Anal,
			&supplementalDataEntry.Dorsal,
			&supplementalDataEntry.Status,
			&supplementalDataEntry.HatcheryOrigin,
			&supplementalDataEntry.Sex,
			&supplementalDataEntry.Stage,
			&supplementalDataEntry.Recapture,
			&supplementalDataEntry.Photo,
			&supplementalDataEntry.GeneticNeeds,
			&supplementalDataEntry.OtherTagInfo,
			&supplementalDataEntry.Comments,
			&supplementalDataEntry.EditInitials,
			&supplementalDataEntry.LastEditComment,
			&supplementalDataEntry.UploadedBy,
			&supplementalDataEntry.Complete,
			&supplementalDataEntry.Approved,
			&supplementalDataEntry.Checkby,
			&supplementalDataEntry.Recorder,
		)
		if err != nil {
			return supplementalDataEntryWithCount, err
		}
		supplementalEntries = append(supplementalEntries, supplementalDataEntry)
	}
	if err = rows.Err(); err != nil {
		return supplementalDataEntryWithCount, err
	}

	supplementalDataEntryWithCount.Items = supplementalEntries

	return supplementalDataEntryWithCount, err
}

// DeleteSupplementalDataEntry removes the supplemental row with the given s_id.
func (s *PallidSturgeonStore) DeleteSupplementalDataEntry(id string) error {
	_, err := s.db.Exec("delete from ds_supplemental where s_id = :1", id)
	return err
}

// searchDataEntriesSql lists ds_search rows; no field-office filter applies here.
var searchDataEntriesSql = `select SE_FID, SE_ID, CHECKBY, conductivity, EDIT_INITIALS, LAST_EDIT_COMMENT, LAST_UPDATED, RECORDER, SEARCH_DATE, search_day,
SEARCH_TYPE_CODE, SITE_ID, START_LATITUDE, START_LONGITUDE, START_TIME, STOP_LATITUDE, STOP_LONGITUDE, STOP_TIME, temp, UPLOADED_BY, UPLOAD_FILENAME,
UPLOAD_SESSION_ID, ds_id from ds_search`

var searchDataEntriesCountSql = `select count(*) from ds_search`
+ +var searchDataEntriesBySeIdSql = `select SE_FID, SE_ID, CHECKBY, conductivity, EDIT_INITIALS, LAST_EDIT_COMMENT, LAST_UPDATED, RECORDER, SEARCH_DATE, search_day, +SEARCH_TYPE_CODE, SITE_ID, START_LATITUDE, START_LONGITUDE, START_TIME, STOP_LATITUDE, STOP_LONGITUDE, STOP_TIME, temp, UPLOADED_BY, UPLOAD_FILENAME, +UPLOAD_SESSION_ID, ds_id from ds_search where se_id = :1` + +var searchDataEntriesCountBySeIdSql = `select count(*) from ds_search where se_id = :1` + +var searchDataEntriesBySiteIdSql = `select SE_FID, SE_ID, CHECKBY, conductivity, EDIT_INITIALS, LAST_EDIT_COMMENT, LAST_UPDATED, RECORDER, SEARCH_DATE, search_day, +SEARCH_TYPE_CODE, SITE_ID, START_LATITUDE, START_LONGITUDE, START_TIME, STOP_LATITUDE, STOP_LONGITUDE, STOP_TIME, temp, UPLOADED_BY, UPLOAD_FILENAME, +UPLOAD_SESSION_ID, ds_id from ds_search where site_id = :1` + +var searchDataEntriesCountBySiteIdSql = `select count(*) from ds_search where site_id = :1` + +func (s *PallidSturgeonStore) GetSearchDataEntries(tableId string, siteId string, queryParams models.SearchParams) (models.SearchDataEntryWithCount, error) { + searchDataEntryWithCount := models.SearchDataEntryWithCount{} + query := "" + queryWithCount := "" + id := "" + + if tableId != "" { + query = searchDataEntriesBySeIdSql + queryWithCount = searchDataEntriesCountBySeIdSql + id = tableId + } + + if siteId != "" { + query = searchDataEntriesBySiteIdSql + queryWithCount = searchDataEntriesCountBySiteIdSql + id = siteId + } + + if tableId == "" && siteId == "" { + query = searchDataEntriesSql + queryWithCount = searchDataEntriesCountSql + } + + countQuery, err := s.db.Prepare(queryWithCount) + if err != nil { + return searchDataEntryWithCount, err + } + + var countrows *sql.Rows + if id == "" { + countrows, err = countQuery.Query() + if err != nil { + return searchDataEntryWithCount, err + } + } else { + countrows, err = countQuery.Query(id) + if err != nil { + return searchDataEntryWithCount, err + } + } + defer countrows.Close() + + for 
countrows.Next() { + err = countrows.Scan(&searchDataEntryWithCount.TotalCount) + if err != nil { + return searchDataEntryWithCount, err + } + } + + searchEntries := []models.UploadSearch{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "se_id desc" + } + searchDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(searchDataEntriesSqlWithSearch) + if err != nil { + return searchDataEntryWithCount, err + } + + var rows *sql.Rows + if id == "" { + rows, err = dbQuery.Query() + if err != nil { + return searchDataEntryWithCount, err + } + } else { + rows, err = dbQuery.Query(id) + if err != nil { + return searchDataEntryWithCount, err + } + } + defer rows.Close() + + for rows.Next() { + searchDataEntry := models.UploadSearch{} + err = rows.Scan(&searchDataEntry.SeFid, &searchDataEntry.SeId, &searchDataEntry.Checkby, &searchDataEntry.Conductivity, &searchDataEntry.EditInitials, &searchDataEntry.LastEditComment, &searchDataEntry.LastUpdated, + &searchDataEntry.Recorder, &searchDataEntry.SearchDate, &searchDataEntry.SearchDay, &searchDataEntry.SearchTypeCode, &searchDataEntry.SiteId, &searchDataEntry.StartLatitude, &searchDataEntry.StartLongitude, + &searchDataEntry.StartTime, &searchDataEntry.StopLatitude, &searchDataEntry.StopLongitude, &searchDataEntry.StopTime, &searchDataEntry.Temp, &searchDataEntry.UploadedBy, &searchDataEntry.UploadFilename, + &searchDataEntry.UploadSessionId, &searchDataEntry.DsId) + if err != nil { + return searchDataEntryWithCount, err + } + searchEntries = append(searchEntries, searchDataEntry) + } + + searchDataEntryWithCount.Items = searchEntries + + return searchDataEntryWithCount, err +} + +var insertSearchDataSql = `insert into ds_search (SE_FID, CHECKBY, conductivity, EDIT_INITIALS, LAST_EDIT_COMMENT, LAST_UPDATED, 
RECORDER, SEARCH_DATE, +SEARCH_TYPE_CODE, SITE_ID, START_LATITUDE, START_LONGITUDE, START_TIME, STOP_LATITUDE, STOP_LONGITUDE, STOP_TIME, temp, UPLOADED_BY, UPLOAD_FILENAME, +UPLOAD_SESSION_ID, ds_id) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21) returning se_id into :22` + +func (s *PallidSturgeonStore) SaveSearchDataEntry(searchDataEntry models.UploadSearch) (int, error) { + var id int + _, err := s.db.Exec(insertSearchDataSql, searchDataEntry.SeFid, searchDataEntry.Checkby, searchDataEntry.Conductivity, searchDataEntry.EditInitials, searchDataEntry.LastEditComment, searchDataEntry.LastUpdated, searchDataEntry.Recorder, + searchDataEntry.SearchDate, searchDataEntry.SearchTypeCode, searchDataEntry.SiteId, searchDataEntry.StartLatitude, searchDataEntry.StartLongitude, searchDataEntry.StartTime, searchDataEntry.StopLatitude, + searchDataEntry.StopLongitude, searchDataEntry.StopTime, searchDataEntry.Temp, searchDataEntry.UploadedBy, searchDataEntry.UploadFilename, searchDataEntry.UploadSessionId, searchDataEntry.DsId, sql.Out{Dest: &id}) + return id, err +} + +var updateSearchDataSql = `UPDATE ds_search SET +SE_FID = :2, +CHECKBY = :3, +CONDUCTIVITY = :4, +EDIT_INITIALS = :5, +LAST_EDIT_COMMENT = :6, +LAST_UPDATED = :7, +RECORDER = :8, +SEARCH_DATE = :9, +SEARCH_DAY = :10, +SEARCH_TYPE_CODE = :11, +SITE_ID = :12, +START_LATITUDE = :13, +START_LONGITUDE = :14, +START_TIME = :15, +STOP_LATITUDE = :16, +STOP_LONGITUDE = :17, +STOP_TIME = :18, +TEMP = :19, +UPLOADED_BY = :20, +UPLOAD_FILENAME = :21, +UPLOAD_SESSION_ID = :22 +WHERE SE_ID = :1` + +func (s *PallidSturgeonStore) UpdateSearchDataEntry(searchDataEntry models.UploadSearch) error { + _, err := s.db.Exec(updateSearchDataSql, searchDataEntry.SeFid, searchDataEntry.Checkby, searchDataEntry.Conductivity, searchDataEntry.EditInitials, searchDataEntry.LastEditComment, searchDataEntry.LastUpdated, searchDataEntry.Recorder, + searchDataEntry.SearchDate, searchDataEntry.SearchDay, 
searchDataEntry.SearchTypeCode, searchDataEntry.SiteId, searchDataEntry.StartLatitude, searchDataEntry.StartLongitude, searchDataEntry.StartTime, searchDataEntry.StopLatitude, + searchDataEntry.StopLongitude, searchDataEntry.StopTime, searchDataEntry.Temp, searchDataEntry.UploadedBy, searchDataEntry.UploadFilename, searchDataEntry.UploadSessionId, searchDataEntry.SeId) + return err +} + +var telemetryDataEntriesSql = `select te.bend,te.CAPTURE_LATITUDE,te.CAPTURE_LONGITUDE,te.CAPTURE_TIME,te.CHECKBY,te.COMMENTS,te.conductivity,te.depth,te.EDIT_INITIALS, +te.FREQUENCY_ID_CODE,te.gravel,te.LAST_EDIT_COMMENT,te.LAST_UPDATED,te.MACRO_ID,te.MESO_ID,te.position_confidence,te.RADIO_TAG_NUM, +te.sand,te.SE_FID,te.SE_ID,te.silt,te.temp,te.turbidity,te.T_FID,te.T_ID,te.UPLOADED_BY,te.UPLOAD_FILENAME, +te.UPLOAD_SESSION_ID, si.site_id from ds_telemetry_fish te +inner join ds_search se on te.se_id = se.se_id +inner join ds_sites si on si.site_id = se.site_id +where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3` + +var telemetryDataEntriesCountSql = `select count(*) from ds_telemetry_fish te +inner join ds_search se on te.se_id = se.se_id +inner join ds_sites si on si.site_id = se.site_id +where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3` + +var telemetryDataEntriesBySeIdSql = `select te.bend,te.CAPTURE_LATITUDE,te.CAPTURE_LONGITUDE,te.CAPTURE_TIME,te.CHECKBY,te.COMMENTS,te.conductivity,te.depth,te.EDIT_INITIALS, +te.FREQUENCY_ID_CODE,te.gravel,te.LAST_EDIT_COMMENT,te.LAST_UPDATED,te.MACRO_ID,te.MESO_ID,te.position_confidence,te.RADIO_TAG_NUM, +te.sand,te.SE_FID,te.SE_ID,te.silt,te.temp,te.turbidity,te.T_FID,te.T_ID,te.UPLOADED_BY,te.UPLOAD_FILENAME, +te.UPLOAD_SESSION_ID, si.site_id from ds_telemetry_fish te +inner join ds_search se on te.se_id = se.se_id +inner join ds_sites si on si.site_id = se.site_id +where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4 +and te.se_id = :1` + +var telemetryDataEntriesCountBySeIdSql = `select 
count(*) from ds_telemetry_fish te
inner join ds_search se on te.se_id = se.se_id
inner join ds_sites si on si.site_id = se.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and te.se_id = :1`

var telemetryDataEntriesByTidSql = `select te.bend,te.CAPTURE_LATITUDE,te.CAPTURE_LONGITUDE,te.CAPTURE_TIME,te.CHECKBY,te.COMMENTS,te.conductivity,te.depth,te.EDIT_INITIALS,
te.FREQUENCY_ID_CODE,te.gravel,te.LAST_EDIT_COMMENT,te.LAST_UPDATED,te.MACRO_ID,te.MESO_ID,te.position_confidence,te.RADIO_TAG_NUM,
te.sand,te.SE_FID,te.SE_ID,te.silt,te.temp,te.turbidity,te.T_FID,te.T_ID,te.UPLOADED_BY,te.UPLOAD_FILENAME,
te.UPLOAD_SESSION_ID, si.site_id from ds_telemetry_fish te
inner join ds_search se on te.se_id = se.se_id
inner join ds_sites si on si.site_id = se.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and te.t_id = :1`

var telemetryDataEntriesCountByTidSql = `select count(*) from ds_telemetry_fish te
inner join ds_search se on te.se_id = se.se_id
inner join ds_sites si on si.site_id = se.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and te.t_id = :1`

// GetTelemetryDataEntries returns telemetry-fish rows plus their total count
// for one field office ('ZZ' means all offices). seId takes precedence over
// tableId (t_id) when both are supplied, because its branch assigns last.
//
// NOTE(review): no OFFSET/FETCH pagination is applied here, unlike
// GetSearchDataEntries - confirm whether that is intentional.
func (s *PallidSturgeonStore) GetTelemetryDataEntries(tableId string, seId string, officeCode string, queryParams models.SearchParams) (models.TelemetryDataEntryWithCount, error) {
	telemetryDataEntryWithCount := models.TelemetryDataEntryWithCount{}
	query := ""
	queryWithCount := ""
	id := ""

	if tableId != "" {
		query = telemetryDataEntriesByTidSql
		queryWithCount = telemetryDataEntriesCountByTidSql
		id = tableId
	}

	if seId != "" {
		query = telemetryDataEntriesBySeIdSql
		queryWithCount = telemetryDataEntriesCountBySeIdSql
		id = seId
	}

	if tableId == "" && seId == "" {
		query = telemetryDataEntriesSql
		queryWithCount = telemetryDataEntriesCountSql
	}

	countQuery, err := s.db.Prepare(queryWithCount)
	if err != nil {
		return telemetryDataEntryWithCount, err
	}
	// Fix: prepared statements were never closed, leaking server-side cursors.
	defer countQuery.Close()

	var countrows *sql.Rows
	if id == "" {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode)
	} else {
		countrows, err = countQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return telemetryDataEntryWithCount, err
	}
	defer countrows.Close()

	for countrows.Next() {
		if err = countrows.Scan(&telemetryDataEntryWithCount.TotalCount); err != nil {
			return telemetryDataEntryWithCount, err
		}
	}
	// Fix: surface any error that terminated row iteration early.
	if err = countrows.Err(); err != nil {
		return telemetryDataEntryWithCount, err
	}

	telemetryEntries := []models.UploadTelemetry{}
	if queryParams.OrderBy == "" {
		queryParams.OrderBy = "t_id asc"
	}
	// NOTE(review): OrderBy is spliced into the SQL text rather than bound -
	// validate it against a column whitelist upstream.
	telemetryDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s", queryParams.OrderBy)
	dbQuery, err := s.db.Prepare(telemetryDataEntriesSqlWithSearch)
	if err != nil {
		return telemetryDataEntryWithCount, err
	}
	defer dbQuery.Close()

	var rows *sql.Rows
	if id == "" {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode)
	} else {
		rows, err = dbQuery.Query(officeCode, officeCode, officeCode, id)
	}
	if err != nil {
		return telemetryDataEntryWithCount, err
	}
	defer rows.Close()

	for rows.Next() {
		telemetryDataEntry := models.UploadTelemetry{}
		// Scan order must match the select column order of the *Sql vars above.
		err = rows.Scan(&telemetryDataEntry.Bend, &telemetryDataEntry.CaptureLatitude, &telemetryDataEntry.CaptureLongitude, &telemetryDataEntry.CaptureTime, &telemetryDataEntry.Checkby, &telemetryDataEntry.Comments, &telemetryDataEntry.Conductivity, &telemetryDataEntry.Depth,
			&telemetryDataEntry.EditInitials, &telemetryDataEntry.FrequencyIdCode, &telemetryDataEntry.Gravel, &telemetryDataEntry.LastEditComment, &telemetryDataEntry.LastUpdated, &telemetryDataEntry.MacroId, &telemetryDataEntry.MesoId, &telemetryDataEntry.PositionConfidence,
			&telemetryDataEntry.RadioTagNum, &telemetryDataEntry.Sand, &telemetryDataEntry.SeFid, &telemetryDataEntry.SeId, &telemetryDataEntry.Silt, &telemetryDataEntry.Temp, &telemetryDataEntry.Turbidity, &telemetryDataEntry.TFid, &telemetryDataEntry.TId, &telemetryDataEntry.UploadedBy,
			&telemetryDataEntry.UploadFilename, &telemetryDataEntry.UploadSessionId, &telemetryDataEntry.SiteId)
		if err != nil {
			return telemetryDataEntryWithCount, err
		}
		telemetryEntries = append(telemetryEntries, telemetryDataEntry)
	}
	if err = rows.Err(); err != nil {
		return telemetryDataEntryWithCount, err
	}

	telemetryDataEntryWithCount.Items = telemetryEntries

	return telemetryDataEntryWithCount, err
}

// insertTelemetryDataSql inserts one ds_telemetry_fish row; :28 is an output
// bind receiving the generated t_id.
var insertTelemetryDataSql = `insert into ds_telemetry_fish (BEND,CAPTURE_LATITUDE,CAPTURE_LONGITUDE,CAPTURE_TIME,CHECKBY,COMMENTS,conductivity,depth,EDIT_INITIALS,FREQUENCY_ID_CODE,gravel,LAST_EDIT_COMMENT,
	LAST_UPDATED,MACRO_ID,MESO_ID,POSITION_CONFIDENCE,RADIO_TAG_NUM,sand,SE_FID,SE_ID,silt,temp,turbidity,T_FID,UPLOADED_BY,UPLOAD_FILENAME,UPLOAD_SESSION_ID)
	values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24,:25,:26,:27) returning t_id into :28`

// SaveTelemetryDataEntry inserts a new telemetry record and returns the
// generated t_id. Argument order matches the :1-:27 bind positions above.
func (s *PallidSturgeonStore) SaveTelemetryDataEntry(telemetryDataEntry models.UploadTelemetry) (int, error) {
	var id int
	_, err := s.db.Exec(insertTelemetryDataSql, telemetryDataEntry.Bend, telemetryDataEntry.CaptureLatitude, telemetryDataEntry.CaptureLongitude, telemetryDataEntry.CaptureTime, telemetryDataEntry.Checkby, telemetryDataEntry.Comments, telemetryDataEntry.Conductivity, telemetryDataEntry.Depth,
		telemetryDataEntry.EditInitials, telemetryDataEntry.FrequencyIdCode, telemetryDataEntry.Gravel, telemetryDataEntry.LastEditComment, telemetryDataEntry.LastUpdated, telemetryDataEntry.MacroId, telemetryDataEntry.MesoId, telemetryDataEntry.PositionConfidence, telemetryDataEntry.RadioTagNum,
		telemetryDataEntry.Sand, telemetryDataEntry.SeFid, telemetryDataEntry.SeId, telemetryDataEntry.Silt, telemetryDataEntry.Temp, telemetryDataEntry.Turbidity, telemetryDataEntry.TFid, telemetryDataEntry.UploadedBy, telemetryDataEntry.UploadFilename, telemetryDataEntry.UploadSessionId, sql.Out{Dest: &id})
	return id, err
}

// updateTelemetryDataSql updates one ds_telemetry_fish row; the T_ID key is
// bind :1 and is passed last by UpdateTelemetryDataEntry.
var updateTelemetryDataSql = `UPDATE ds_telemetry_fish SET
BEND = :2,
CAPTURE_LATITUDE = :3,
CAPTURE_LONGITUDE = :4,
CAPTURE_TIME = :5,
CHECKBY = :6,
COMMENTS = :7,
CONDUCTIVITY = :8,
DEPTH = :9,
EDIT_INITIALS = :10,
FREQUENCY_ID_CODE = :11,
GRAVEL = :12,
LAST_EDIT_COMMENT = :13,
LAST_UPDATED = :14,
MACRO_ID = :15,
MESO_ID = :16,
POSITION_CONFIDENCE = :17,
RADIO_TAG_NUM = :18,
SAND = :19,
SE_FID = :20,
SE_ID = :21,
SILT = :22,
TEMP = :23,
TURBIDITY = :24,
T_FID = :25,
UPLOADED_BY = :26,
UPLOAD_FILENAME = :27,
UPLOAD_SESSION_ID = :28
WHERE T_ID = :1`

// UpdateTelemetryDataEntry rewrites every editable column of the row
// identified by telemetryDataEntry.TId (passed last for the :1 WHERE bind).
func (s *PallidSturgeonStore) UpdateTelemetryDataEntry(telemetryDataEntry models.UploadTelemetry) error {
	_, err := s.db.Exec(updateTelemetryDataSql, telemetryDataEntry.Bend, telemetryDataEntry.CaptureLatitude, telemetryDataEntry.CaptureLongitude, telemetryDataEntry.CaptureTime, telemetryDataEntry.Checkby, telemetryDataEntry.Comments, telemetryDataEntry.Conductivity, telemetryDataEntry.Depth,
		telemetryDataEntry.EditInitials, telemetryDataEntry.FrequencyIdCode, telemetryDataEntry.Gravel, telemetryDataEntry.LastEditComment, telemetryDataEntry.LastUpdated, telemetryDataEntry.MacroId, telemetryDataEntry.MesoId, telemetryDataEntry.PositionConfidence, telemetryDataEntry.RadioTagNum,
		telemetryDataEntry.Sand, telemetryDataEntry.SeFid, telemetryDataEntry.SeId, telemetryDataEntry.Silt, telemetryDataEntry.Temp, telemetryDataEntry.Turbidity, telemetryDataEntry.TFid, telemetryDataEntry.UploadedBy, telemetryDataEntry.UploadFilename, telemetryDataEntry.UploadSessionId, telemetryDataEntry.TId)
	return err
}

// DeleteTelemetryDataEntry removes the telemetry row with the given t_id.
func (s *PallidSturgeonStore) DeleteTelemetryDataEntry(id string) error {
	_, err := s.db.Exec("delete from ds_telemetry_fish where t_id = :1", id)
	return err
}

// procedureDataEntriesSql lists ds_procedure rows joined through fish, moriver
// and site so they can be filtered by field office (clauses continue below).
var procedureDataEntriesSql = `select pr.ID, pr.F_ID, pr.F_FID, si.site_id, pr.PURPOSE_CODE, pr.PROCEDURE_DATE, pr.PROCEDURE_START_TIME, pr.PROCEDURE_END_TIME, pr.PROCEDURE_BY, pr.ANTIBIOTIC_INJECTION_IND, pr.PHOTO_DORSAL_IND, pr.PHOTO_VENTRAL_IND,
pr.PHOTO_LEFT_IND,
pr.OLD_RADIO_TAG_NUM, pr.OLD_FREQUENCY_ID, pr.DST_SERIAL_NUM, pr.DST_START_TIME, pr.DST_REIMPLANT_IND, pr.NEW_RADIO_TAG_NUM, pr.NEW_FREQUENCY_ID, pr.SEX_CODE, pr.COMMENTS, pr.FISH_HEALTH_COMMENTS, pr.SPAWN_CODE, pr.EVAL_LOCATION_CODE,
pr.BLOOD_SAMPLE_IND, pr.EGG_SAMPLE_IND, pr.VISUAL_REPRO_STATUS_CODE, pr.ULTRASOUND_REPRO_STATUS_CODE, pr.ULTRASOUND_GONAD_LENGTH, pr.GONAD_CONDITION, pr.EXPECTED_SPAWN_YEAR, pr.LAST_UPDATED, pr.UPLOAD_SESSION_ID, pr.UPLOADED_BY, pr.UPLOAD_FILENAME,
pr.CHECKBY, pr.EDIT_INITIALS, pr.LAST_EDIT_COMMENT, pr.MR_FID, pr.dst_start_date, pr.s_id from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

// Count of the unfiltered procedure listing; same office filter (:1-:3,
// 'ZZ' matches every field office).
var procedureDataEntriesCountBySql = `SELECT count(*) from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :1 != 'ZZ' THEN si.fieldoffice ELSE :2 END) = :3`

// Filtered variants below bind the filter value as :1 and the office code as
// :2-:4, matching the pattern used by the supplemental/telemetry queries.
var procedureDataEntriesByIdSql = `select pr.ID, pr.F_ID, pr.F_FID, si.site_id, pr.PURPOSE_CODE, pr.PROCEDURE_DATE, pr.PROCEDURE_START_TIME, pr.PROCEDURE_END_TIME, pr.PROCEDURE_BY, pr.ANTIBIOTIC_INJECTION_IND, pr.PHOTO_DORSAL_IND, pr.PHOTO_VENTRAL_IND,
pr.PHOTO_LEFT_IND, pr.OLD_RADIO_TAG_NUM, pr.OLD_FREQUENCY_ID, pr.DST_SERIAL_NUM, pr.DST_START_TIME, pr.DST_REIMPLANT_IND, pr.NEW_RADIO_TAG_NUM, pr.NEW_FREQUENCY_ID, pr.SEX_CODE, pr.COMMENTS, pr.FISH_HEALTH_COMMENTS, pr.SPAWN_CODE, pr.EVAL_LOCATION_CODE,
pr.BLOOD_SAMPLE_IND, pr.EGG_SAMPLE_IND, pr.VISUAL_REPRO_STATUS_CODE, pr.ULTRASOUND_REPRO_STATUS_CODE, pr.ULTRASOUND_GONAD_LENGTH, pr.GONAD_CONDITION, pr.EXPECTED_SPAWN_YEAR, pr.LAST_UPDATED, pr.UPLOAD_SESSION_ID, pr.UPLOADED_BY, pr.UPLOAD_FILENAME,
pr.CHECKBY, pr.EDIT_INITIALS, pr.LAST_EDIT_COMMENT, pr.MR_FID, pr.dst_start_date, pr.s_id from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.id = :1`

var procedureDataEntriesCountByIdSql = `SELECT count(*) from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.id = :1`

var procedureDataEntriesByFidSql = `select pr.ID, pr.F_ID, pr.F_FID, si.site_id, pr.PURPOSE_CODE, pr.PROCEDURE_DATE, pr.PROCEDURE_START_TIME, pr.PROCEDURE_END_TIME, pr.PROCEDURE_BY, pr.ANTIBIOTIC_INJECTION_IND, pr.PHOTO_DORSAL_IND, pr.PHOTO_VENTRAL_IND,
pr.PHOTO_LEFT_IND, pr.OLD_RADIO_TAG_NUM, pr.OLD_FREQUENCY_ID, pr.DST_SERIAL_NUM, pr.DST_START_TIME, pr.DST_REIMPLANT_IND, pr.NEW_RADIO_TAG_NUM, pr.NEW_FREQUENCY_ID, pr.SEX_CODE, pr.COMMENTS, pr.FISH_HEALTH_COMMENTS, pr.SPAWN_CODE, pr.EVAL_LOCATION_CODE,
pr.BLOOD_SAMPLE_IND, pr.EGG_SAMPLE_IND, pr.VISUAL_REPRO_STATUS_CODE, pr.ULTRASOUND_REPRO_STATUS_CODE, pr.ULTRASOUND_GONAD_LENGTH, pr.GONAD_CONDITION, pr.EXPECTED_SPAWN_YEAR, pr.LAST_UPDATED, pr.UPLOAD_SESSION_ID, pr.UPLOADED_BY, pr.UPLOAD_FILENAME,
pr.CHECKBY, pr.EDIT_INITIALS, pr.LAST_EDIT_COMMENT, pr.MR_FID, pr.dst_start_date, pr.s_id from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.f_id = :1`

var procedureDataEntriesCountByFidSql = `SELECT count(*) from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.f_id = :1`

var procedureDataEntriesByFfidSql = `select pr.ID, pr.F_ID, pr.F_FID, si.site_id, pr.PURPOSE_CODE, pr.PROCEDURE_DATE, pr.PROCEDURE_START_TIME, pr.PROCEDURE_END_TIME, pr.PROCEDURE_BY, pr.ANTIBIOTIC_INJECTION_IND, pr.PHOTO_DORSAL_IND, pr.PHOTO_VENTRAL_IND,
pr.PHOTO_LEFT_IND, pr.OLD_RADIO_TAG_NUM, pr.OLD_FREQUENCY_ID, pr.DST_SERIAL_NUM, pr.DST_START_TIME, pr.DST_REIMPLANT_IND, pr.NEW_RADIO_TAG_NUM, pr.NEW_FREQUENCY_ID, pr.SEX_CODE, pr.COMMENTS, pr.FISH_HEALTH_COMMENTS, pr.SPAWN_CODE, pr.EVAL_LOCATION_CODE,
pr.BLOOD_SAMPLE_IND, pr.EGG_SAMPLE_IND, pr.VISUAL_REPRO_STATUS_CODE, pr.ULTRASOUND_REPRO_STATUS_CODE, pr.ULTRASOUND_GONAD_LENGTH, pr.GONAD_CONDITION, pr.EXPECTED_SPAWN_YEAR, pr.LAST_UPDATED, pr.UPLOAD_SESSION_ID, pr.UPLOADED_BY, pr.UPLOAD_FILENAME,
pr.CHECKBY, pr.EDIT_INITIALS, pr.LAST_EDIT_COMMENT, pr.MR_FID, pr.dst_start_date, pr.s_id from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.f_fid = :1`

var procedureDataEntriesCountByFfidSql = `SELECT count(*) from ds_procedure pr
inner join ds_fish fi on fi.f_id = pr.f_id
inner join ds_moriver mo on mo.mr_id = fi.mr_id
inner join ds_sites si on si.site_id = mo.site_id
where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4
and pr.f_fid = :1`

var procedureDataEntriesByMrIdSql = `select pr.ID, pr.F_ID, pr.F_FID, si.site_id, pr.PURPOSE_CODE, pr.PROCEDURE_DATE, pr.PROCEDURE_START_TIME, pr.PROCEDURE_END_TIME, pr.PROCEDURE_BY, pr.ANTIBIOTIC_INJECTION_IND, pr.PHOTO_DORSAL_IND, pr.PHOTO_VENTRAL_IND,
pr.PHOTO_LEFT_IND, pr.OLD_RADIO_TAG_NUM, pr.OLD_FREQUENCY_ID, pr.DST_SERIAL_NUM, pr.DST_START_TIME, pr.DST_REIMPLANT_IND, pr.NEW_RADIO_TAG_NUM, pr.NEW_FREQUENCY_ID, pr.SEX_CODE, pr.COMMENTS, pr.FISH_HEALTH_COMMENTS, pr.SPAWN_CODE, pr.EVAL_LOCATION_CODE,
pr.BLOOD_SAMPLE_IND, pr.EGG_SAMPLE_IND, pr.VISUAL_REPRO_STATUS_CODE, pr.ULTRASOUND_REPRO_STATUS_CODE, pr.ULTRASOUND_GONAD_LENGTH,
pr.GONAD_CONDITION, pr.EXPECTED_SPAWN_YEAR, pr.LAST_UPDATED, pr.UPLOAD_SESSION_ID, pr.UPLOADED_BY, pr.UPLOAD_FILENAME, +pr.CHECKBY, pr.EDIT_INITIALS, pr.LAST_EDIT_COMMENT, pr.MR_FID, pr.dst_start_date, pr.s_id from ds_procedure pr +inner join ds_fish fi on fi.f_id = pr.f_id +inner join ds_moriver mo on mo.mr_id = fi.mr_id +inner join ds_sites si on si.site_id = mo.site_id +where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4 +and fi.mr_id = :1` + +var procedureDataEntriesCountByMrIdSql = `SELECT count(*) from ds_procedure pr +inner join ds_fish fi on fi.f_id = pr.f_id +inner join ds_moriver mo on mo.mr_id = fi.mr_id +inner join ds_sites si on si.site_id = mo.site_id +where (CASE when :2 != 'ZZ' THEN si.fieldoffice ELSE :3 END) = :4 +and fi.mr_id = :1` + +func (s *PallidSturgeonStore) GetProcedureDataEntries(tableId string, fId string, mrId string, officeCode string, queryParams models.SearchParams) (models.ProcedureDataEntryWithCount, error) { + procedureDataEntryWithCount := models.ProcedureDataEntryWithCount{} + query := "" + queryWithCount := "" + id := "" + + if tableId != "" { + query = procedureDataEntriesByIdSql + queryWithCount = procedureDataEntriesCountByIdSql + id = tableId + } + + if fId != "" { + query = procedureDataEntriesByFidSql + queryWithCount = procedureDataEntriesCountByFidSql + id = fId + } + + if mrId != "" { + query = procedureDataEntriesByMrIdSql + queryWithCount = procedureDataEntriesCountByMrIdSql + id = mrId + } + + if tableId == "" && fId == "" && mrId == "" { + query = procedureDataEntriesSql + queryWithCount = procedureDataEntriesCountBySql + } + + countQuery, err := s.db.Prepare(queryWithCount) + if err != nil { + return procedureDataEntryWithCount, err + } + + var countrows *sql.Rows + if id == "" { + countrows, err = countQuery.Query(officeCode, officeCode, officeCode) + if err != nil { + return procedureDataEntryWithCount, err + } + } else { + countrows, err = countQuery.Query(officeCode, officeCode, officeCode, id) + 
if err != nil { + return procedureDataEntryWithCount, err + } + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&procedureDataEntryWithCount.TotalCount) + if err != nil { + return procedureDataEntryWithCount, err + } + } + + procedureEntries := []models.UploadProcedure{} + if queryParams.OrderBy == "" { + queryParams.OrderBy = "id" + } + procedureDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s", queryParams.OrderBy) + dbQuery, err := s.db.Prepare(procedureDataEntriesSqlWithSearch) + if err != nil { + return procedureDataEntryWithCount, err + } + + var rows *sql.Rows + if id == "" { + rows, err = dbQuery.Query(officeCode, officeCode, officeCode) + if err != nil { + return procedureDataEntryWithCount, err + } + } else { + rows, err = dbQuery.Query(officeCode, officeCode, officeCode, id) + if err != nil { + return procedureDataEntryWithCount, err + } + } + defer rows.Close() + + for rows.Next() { + procedureDataEntry := models.UploadProcedure{} + err = rows.Scan(&procedureDataEntry.Id, &procedureDataEntry.Fid, &procedureDataEntry.FFid, &procedureDataEntry.SiteID, &procedureDataEntry.PurposeCode, &procedureDataEntry.ProcedureDate, &procedureDataEntry.ProcedureStartTime, &procedureDataEntry.ProcedureEndTime, &procedureDataEntry.ProcedureBy, &procedureDataEntry.AntibioticInjectionInd, + &procedureDataEntry.PhotoDorsalInd, &procedureDataEntry.PhotoVentralInd, &procedureDataEntry.PhotoLeftInd, &procedureDataEntry.OldRadioTagNum, &procedureDataEntry.OldFrequencyId, &procedureDataEntry.DstSerialNum, &procedureDataEntry.DstStartTime, &procedureDataEntry.DstReimplantInd, + &procedureDataEntry.NewRadioTagNum, &procedureDataEntry.NewFrequencyId, &procedureDataEntry.SexCode, &procedureDataEntry.Comments, &procedureDataEntry.FishHealthComments, &procedureDataEntry.SpawnStatus, &procedureDataEntry.EvalLocationCode, &procedureDataEntry.BloodSampleInd, &procedureDataEntry.EggSampleInd, + &procedureDataEntry.VisualReproStatusCode, 
&procedureDataEntry.UltrasoundReproStatusCode, &procedureDataEntry.UltrasoundGonadLength, &procedureDataEntry.GonadCondition, &procedureDataEntry.ExpectedSpawnYear, &procedureDataEntry.LastUpdated, &procedureDataEntry.UploadSessionId, &procedureDataEntry.UploadedBy, + &procedureDataEntry.UploadFilename, &procedureDataEntry.Checkby, &procedureDataEntry.EditInitials, &procedureDataEntry.LastEditComment, &procedureDataEntry.MrFid, &procedureDataEntry.DstStartDate, &procedureDataEntry.Sid) + if err != nil { + return procedureDataEntryWithCount, err + } + procedureEntries = append(procedureEntries, procedureDataEntry) + } + + procedureDataEntryWithCount.Items = procedureEntries + + return procedureDataEntryWithCount, err +} + +var insertProcedureDataSql = `insert into ds_procedure (S_ID, F_ID, F_FID, PURPOSE_CODE, PROCEDURE_DATE, PROCEDURE_START_TIME, PROCEDURE_END_TIME, PROCEDURE_BY, ANTIBIOTIC_INJECTION_IND, PHOTO_DORSAL_IND, PHOTO_VENTRAL_IND, PHOTO_LEFT_IND, OLD_RADIO_TAG_NUM, OLD_FREQUENCY_ID, DST_SERIAL_NUM, DST_START_DATE, DST_START_TIME, + DST_REIMPLANT_IND, NEW_RADIO_TAG_NUM, NEW_FREQUENCY_ID, SEX_CODE, COMMENTS, FISH_HEALTH_COMMENTS, SPAWN_CODE, EVAL_LOCATION_CODE, BLOOD_SAMPLE_IND, EGG_SAMPLE_IND, VISUAL_REPRO_STATUS_CODE, ULTRASOUND_REPRO_STATUS_CODE, ULTRASOUND_GONAD_LENGTH, GONAD_CONDITION, EXPECTED_SPAWN_YEAR, LAST_UPDATED, UPLOAD_SESSION_ID, UPLOADED_BY, + UPLOAD_FILENAME, CHECKBY, EDIT_INITIALS, LAST_EDIT_COMMENT, MR_FID) + values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35,:36,:37,:38,:39,:40) returning id into :41` + +func (s *PallidSturgeonStore) SaveProcedureDataEntry(procedureDataEntry models.UploadProcedure) (int, error) { + var id int + _, err := s.db.Exec(insertProcedureDataSql, procedureDataEntry.Sid, procedureDataEntry.Fid, procedureDataEntry.FFid, procedureDataEntry.PurposeCode, procedureDataEntry.ProcedureDate, procedureDataEntry.ProcedureStartTime, 
procedureDataEntry.ProcedureEndTime, procedureDataEntry.ProcedureBy, procedureDataEntry.AntibioticInjectionInd, + procedureDataEntry.PhotoDorsalInd, procedureDataEntry.PhotoVentralInd, procedureDataEntry.PhotoLeftInd, procedureDataEntry.OldRadioTagNum, procedureDataEntry.OldFrequencyId, procedureDataEntry.DstSerialNum, procedureDataEntry.DstStartDate, procedureDataEntry.DstStartTime, procedureDataEntry.DstReimplantInd, + procedureDataEntry.NewRadioTagNum, procedureDataEntry.NewFrequencyId, procedureDataEntry.SexCode, procedureDataEntry.Comments, procedureDataEntry.FishHealthComments, procedureDataEntry.SpawnStatus, procedureDataEntry.EvalLocationCode, procedureDataEntry.BloodSampleInd, procedureDataEntry.EggSampleInd, + procedureDataEntry.VisualReproStatusCode, procedureDataEntry.UltrasoundReproStatusCode, procedureDataEntry.UltrasoundGonadLength, procedureDataEntry.GonadCondition, procedureDataEntry.ExpectedSpawnYear, procedureDataEntry.LastUpdated, procedureDataEntry.UploadSessionId, procedureDataEntry.UploadedBy, + procedureDataEntry.UploadFilename, procedureDataEntry.Checkby, procedureDataEntry.EditInitials, procedureDataEntry.LastEditComment, procedureDataEntry.MrFid, sql.Out{Dest: &id}) + return id, err +} + +var updateProcedureDataSql = `update ds_procedure set +f_id = :2, +f_fid =:3, +PURPOSE_CODE = :4, +PROCEDURE_DATE = :5, +PROCEDURE_START_TIME = :6, +PROCEDURE_END_TIME = :7, +PROCEDURE_BY = :8, +ANTIBIOTIC_INJECTION_IND = :9, +PHOTO_DORSAL_IND = :10, +PHOTO_VENTRAL_IND = :11, +PHOTO_LEFT_IND = :12, +OLD_RADIO_TAG_NUM = :13, +OLD_FREQUENCY_ID = :14, +DST_SERIAL_NUM = :15, +DST_START_DATE = :16, +DST_START_TIME = :17, +DST_REIMPLANT_IND = :18, +NEW_RADIO_TAG_NUM = :19, +NEW_FREQUENCY_ID = :20, +SEX_CODE = :21, +COMMENTS = :22, +FISH_HEALTH_COMMENTS = :23, +SPAWN_CODE = :24, +EVAL_LOCATION_CODE = :25, +BLOOD_SAMPLE_IND = :26, +EGG_SAMPLE_IND = :27, +VISUAL_REPRO_STATUS_CODE = :28, +ULTRASOUND_REPRO_STATUS_CODE = :29, +ULTRASOUND_GONAD_LENGTH = :30, 
+GONAD_CONDITION = :31, +EXPECTED_SPAWN_YEAR = :32, +LAST_UPDATED = :33, +UPLOAD_SESSION_ID = :34, +UPLOADED_BY = :35, +UPLOAD_FILENAME = :36, +CHECKBY = :37, +EDIT_INITIALS = :38, +LAST_EDIT_COMMENT = :39, +mr_fid = :40, +s_id = :41 +where id = :1` + +func (s *PallidSturgeonStore) UpdateProcedureDataEntry(procedureDataEntry models.UploadProcedure) error { + _, err := s.db.Exec(updateProcedureDataSql, procedureDataEntry.Fid, procedureDataEntry.FFid, procedureDataEntry.PurposeCode, procedureDataEntry.ProcedureDate, procedureDataEntry.ProcedureStartTime, procedureDataEntry.ProcedureEndTime, procedureDataEntry.ProcedureBy, procedureDataEntry.AntibioticInjectionInd, + procedureDataEntry.PhotoDorsalInd, procedureDataEntry.PhotoVentralInd, procedureDataEntry.PhotoLeftInd, procedureDataEntry.OldRadioTagNum, procedureDataEntry.OldFrequencyId, procedureDataEntry.DstSerialNum, procedureDataEntry.DstStartDate, procedureDataEntry.DstStartTime, procedureDataEntry.DstReimplantInd, + procedureDataEntry.NewRadioTagNum, procedureDataEntry.NewFrequencyId, procedureDataEntry.SexCode, procedureDataEntry.Comments, procedureDataEntry.FishHealthComments, procedureDataEntry.SpawnStatus, procedureDataEntry.EvalLocationCode, procedureDataEntry.BloodSampleInd, procedureDataEntry.EggSampleInd, + procedureDataEntry.VisualReproStatusCode, procedureDataEntry.UltrasoundReproStatusCode, procedureDataEntry.UltrasoundGonadLength, procedureDataEntry.GonadCondition, procedureDataEntry.ExpectedSpawnYear, procedureDataEntry.LastUpdated, procedureDataEntry.UploadSessionId, procedureDataEntry.UploadedBy, + procedureDataEntry.UploadFilename, procedureDataEntry.Checkby, procedureDataEntry.EditInitials, procedureDataEntry.LastEditComment, procedureDataEntry.MrFid, procedureDataEntry.Sid, procedureDataEntry.Id) + return err +} + +func (s *PallidSturgeonStore) DeleteProcedureDataEntry(id string) error { + _, err := s.db.Exec("delete from ds_procedure where id = :1", id) + return err +} + +var 
fishDataSummaryFullDataSql = `select * FROM table (pallid_data_api.fish_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFullFishDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string) (string, error) { + dbQuery, err := s.db.Prepare(fishDataSummaryFullDataSql) + if err != nil { + log.Fatal("Cannot create to file", err) + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + log.Fatal("Cannot create to file", err) + } + defer rows.Close() + + cols, _ := rows.Columns() + + file, err := os.Create("FishDataSummary.csv") + if err != nil { + log.Fatal("Cannot create file", err) + } + + defer file.Close() + + writer := csv.NewWriter(file) + defer writer.Flush() + + //save header + data := make([]string, 0) + data = append(data, cols...) + err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) 
+ + data := make([]string, 0) + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var fishDataSummarySql = `SELECT mr_id, f_id, year, FIELD_OFFICE_CODE, PROJECT_CODE, SEGMENT_CODE, SEASON_CODE, BEND_NUMBER, BEND_R_OR_N, bend_river_mile, panelhook, SPECIES_CODE, HATCHERY_ORIGIN_CODE, checkby FROM table (pallid_data_api.fish_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +var fishDataSummaryCountSql = `SELECT count(*) FROM table (pallid_data_api.fish_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFishDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.FishSummaryWithCount, error) { + fishSummariesWithCount := models.FishSummaryWithCount{} + countQuery, err := s.db.Prepare(fishDataSummaryCountSql) + if err != nil { + return fishSummariesWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return fishSummariesWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&fishSummariesWithCount.TotalCount) + if err != nil { + return fishSummariesWithCount, err + } + } + + fishSummaries := []models.FishSummary{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id" + } + fishDataSummarySqlWithSearch := fishDataSummarySql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) 
+ dbQuery, err := s.db.Prepare(fishDataSummarySqlWithSearch) + if err != nil { + return fishSummariesWithCount, err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return fishSummariesWithCount, err + } + defer rows.Close() + + for rows.Next() { + fishSummary := models.FishSummary{} + err = rows.Scan(&fishSummary.UniqueID, &fishSummary.FishID, &fishSummary.Year, &fishSummary.FieldOffice, &fishSummary.Project, + &fishSummary.Segment, &fishSummary.Season, &fishSummary.Bend, &fishSummary.Bendrn, &fishSummary.BendRiverMile, &fishSummary.Panelhook, + &fishSummary.Species, &fishSummary.HatcheryOrigin, &fishSummary.CheckedBy) + if err != nil { + return fishSummariesWithCount, err + } + fishSummaries = append(fishSummaries, fishSummary) + } + + fishSummariesWithCount.Items = fishSummaries + + return fishSummariesWithCount, err +} + +var suppDataSummaryFullDataSql = `select * FROM table (pallid_data_api.supp_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFullSuppDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string) (string, error) { + dbQuery, err := s.db.Prepare(suppDataSummaryFullDataSql) + if err != nil { + return "Cannot create file", err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return "Cannot create file", err + } + defer rows.Close() + + cols, _ := rows.Columns() + + file, err := os.Create("SupplementalDataSummary.csv") + if err != nil { + log.Fatal("Cannot create file", err) + } + + defer file.Close() + + writer := csv.NewWriter(file) + defer writer.Flush() + + //save header + data := make([]string, 0) + data = append(data, cols...) 
+ err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) + + data := make([]string, 0) + + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var suppDataSummarySql = `SELECT fish_code, mr_id, f_id, sid_display, year, FIELD_OFFICE_CODE, PROJECT_CODE, SEGMENT_CODE, SEASON_CODE, BEND_NUMBER, BEND_R_OR_N, bend_river_mile, HATCHERY_ORIGIN_CODE, tag_number,checkby FROM table (pallid_data_api.supp_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +var suppDataSummaryCountSql = `SELECT count(*) FROM table (pallid_data_api.supp_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetSuppDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.SuppSummaryWithCount, error) { + suppSummariesWithCount := models.SuppSummaryWithCount{} + countQuery, err := s.db.Prepare(suppDataSummaryCountSql) + if err != nil { + return suppSummariesWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return suppSummariesWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&suppSummariesWithCount.TotalCount) + if err != nil { + return suppSummariesWithCount, err + } + } + + suppSummaries := []models.SuppSummary{} + 
offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id" + } + suppDataSummarySqlWithSearch := suppDataSummarySql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(suppDataSummarySqlWithSearch) + if err != nil { + return suppSummariesWithCount, err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return suppSummariesWithCount, err + } + defer rows.Close() + + for rows.Next() { + summary := models.SuppSummary{} + err = rows.Scan(&summary.FishCode, &summary.UniqueID, &summary.FishID, &summary.SuppID, &summary.Year, + &summary.FieldOffice, &summary.Project, &summary.Segment, &summary.Season, &summary.Bend, &summary.Bendrn, + &summary.BendRiverMile, &summary.HatcheryOrigin, &summary.TagNumber, &summary.CheckedBy) + if err != nil { + return suppSummariesWithCount, err + } + suppSummaries = append(suppSummaries, summary) + } + + suppSummariesWithCount.Items = suppSummaries + + return suppSummariesWithCount, err +} + +var missouriDataSummaryFullDataSql = `SELECT * FROM table (pallid_data_api.missouri_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFullMissouriDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string) (string, error) { + dbQuery, err := s.db.Prepare(missouriDataSummaryFullDataSql) + if err != nil { + return "Cannot create file", err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return "Cannot create file", err + } + defer rows.Close() + + cols, _ := rows.Columns() + + file, err := os.Create("MissouriDataSummary.csv") + if err != 
nil { + log.Fatal("Cannot create file", err) + } + + defer file.Close() + + writer := csv.NewWriter(file) + defer writer.Flush() + + //save header + data := make([]string, 0) + data = append(data, cols...) + err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) + + data := make([]string, 0) + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else if (cols[i] == "SET_DATE") { + if p, okay := val.(string); okay { + v, err = processTimeString(p) + } else { + v, err = processTimeString(fmt.Sprintf("%v", val)) + } + if err != nil { + fmt.Printf("error", err) + } + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var missouriDataSummarySql = `SELECT mr_id, year, FIELD_OFFICE_CODE, PROJECT_CODE, SEGMENT_CODE, SEASON_CODE, bend_number, BEND_R_OR_N, bend_river_mile, subsample, subsample_pass, set_Date, conductivity, checkby, approved FROM table (pallid_data_api.missouri_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +var missouriDataSummaryCountSql = `SELECT count(*) FROM table (pallid_data_api.missouri_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetMissouriDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.MissouriSummaryWithCount, error) { + missouriSummariesWithCount := models.MissouriSummaryWithCount{} + countQuery, err := 
s.db.Prepare(missouriDataSummaryCountSql) + if err != nil { + return missouriSummariesWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return missouriSummariesWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&missouriSummariesWithCount.TotalCount) + if err != nil { + return missouriSummariesWithCount, err + } + } + + missouriSummaries := []models.MissouriSummary{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id" + } + missouriDataSummarySqlWithSearch := missouriDataSummarySql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(missouriDataSummarySqlWithSearch) + if err != nil { + return missouriSummariesWithCount, err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return missouriSummariesWithCount, err + } + defer rows.Close() + + for rows.Next() { + summary := models.MissouriSummary{} + err = rows.Scan(&summary.UniqueID, &summary.Year, &summary.FieldOffice, &summary.Project, &summary.Segment, + &summary.Season, &summary.Bend, &summary.Bendrn, &summary.BendRiverMile, + &summary.Subsample, &summary.Pass, &summary.SetDate, &summary.Conductivity, &summary.CheckedBy, &summary.Approved) + if err != nil { + return missouriSummariesWithCount, err + } + missouriSummaries = append(missouriSummaries, summary) + } + + missouriSummariesWithCount.Items = missouriSummaries + + return missouriSummariesWithCount, err +} + +var geneticDataSummaryFullDataSql = `SELECT * FROM table (pallid_data_api.genetic_datasummary_fnc(:1, :2, :3, to_date(:4,'MM/DD/YYYY'), to_date(:5,'MM/DD/YYYY'), :6, :7, :8, :9))` + +func (s *PallidSturgeonStore) 
GetFullGeneticDataSummary(year string, officeCode string, project string, fromDate string, toDate string, broodstock string, hatchwild string, speciesid string, archive string) (string, error) {
+	dbQuery, err := s.db.Prepare(geneticDataSummaryFullDataSql)
+	if err != nil {
+		return "Cannot create file", err
+	}
+
+	// Bind arguments in placeholder order (:6 broodstock, :7 hatchwild, :8 speciesid, :9 archive).
+	// BUG FIX: hatchwild was previously passed twice, which shifted speciesid into the
+	// archive slot and silently dropped the archive filter.
+	rows, err := dbQuery.Query(year, officeCode, project, fromDate, toDate, broodstock, hatchwild, speciesid, archive)
+	if err != nil {
+		return "Cannot create file", err
+	}
+	defer rows.Close()
+
+	cols, _ := rows.Columns()
+
+	file, err := os.Create("GeneticDataSummary.csv")
+	if err != nil {
+		log.Fatal("Cannot create file", err)
+	}
+
+	defer file.Close()
+
+	writer := csv.NewWriter(file)
+	defer writer.Flush()
+
+	//save header
+	data := make([]string, 0)
+	data = append(data, cols...)
+	err = writer.Write(data)
+	if err != nil {
+		log.Fatal("Cannot write to file", err)
+	}
+
+	for rows.Next() {
+
+		columns := make([]interface{}, len(cols))
+		columnPointers := make([]interface{}, len(cols))
+		for i := range columns {
+			columnPointers[i] = &columns[i]
+		}
+
+		rows.Scan(columnPointers...)
+
+		data := make([]string, 0)
+		for i := range cols {
+			var v string
+			val := columns[i]
+
+			if val == nil {
+				v = ""
+			} else {
+				v = fmt.Sprintf("%v", val)
+			}
+			data = append(data, v)
+		}
+
+		err := writer.Write(data)
+		if err != nil {
+			log.Fatal("Cannot write to file", err)
+		}
+	}
+
+	return file.Name(), err
+}
+
+var geneticDataSummarySql = `SELECT year,FIELD_OFFICE_CODE,PROJECT_CODE,genetics_vial_number,pit_tag,river,river_mile,state,set_date,broodstock_yn,hatchwild_yn,Speciesid_yn,archive_yn FROM table (pallid_data_api.genetic_datasummary_fnc(:1, :2, :3, to_date(:4,'MM/DD/YYYY'), to_date(:5,'MM/DD/YYYY'), :6, :7, :8, :9))`
+
+var geneticDataSummaryCountSql = `SELECT count(*) FROM table (pallid_data_api.genetic_datasummary_fnc(:1, :2, :3, to_date(:4,'MM/DD/YYYY'), to_date(:5,'MM/DD/YYYY'), :6, :7, :8, :9))`
+
+// GetGeneticDataSummary returns one page of genetic-summary rows plus the total
+// row count for the given filters. Placeholder order for both queries is
+// :6 broodstock, :7 hatchwild, :8 speciesid, :9 archive.
+func (s *PallidSturgeonStore) GetGeneticDataSummary(year string, officeCode string, project string, fromDate string, toDate string, broodstock string, hatchwild string, speciesid string, archive string, queryParams models.SearchParams) (models.GeneticSummaryWithCount, error) {
+	geneticSummariesWithCount := models.GeneticSummaryWithCount{}
+	countQuery, err := s.db.Prepare(geneticDataSummaryCountSql)
+	if err != nil {
+		return geneticSummariesWithCount, err
+	}
+
+	// BUG FIX: hatchwild was previously bound twice and archive was never bound,
+	// so the count was computed against the wrong filter values.
+	countrows, err := countQuery.Query(year, officeCode, project, fromDate, toDate, broodstock, hatchwild, speciesid, archive)
+	if err != nil {
+		return geneticSummariesWithCount, err
+	}
+	defer countrows.Close()
+
+	for countrows.Next() {
+		err = countrows.Scan(&geneticSummariesWithCount.TotalCount)
+		if err != nil {
+			return geneticSummariesWithCount, err
+		}
+	}
+
+	geneticSummaries := []models.GeneticSummary{}
+	offset := queryParams.PageSize * queryParams.Page
+	if queryParams.OrderBy == "" {
+		queryParams.OrderBy = "genetics_vial_number"
+	}
+	geneticDataSummarySqlWithSearch := geneticDataSummarySql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize))
+	dbQuery, err := s.db.Prepare(geneticDataSummarySqlWithSearch)
+	if err != nil {
+		return geneticSummariesWithCount, err
+	}
+
+	// Same fix as the count query: bind speciesid and archive to their own slots.
+	rows, err := dbQuery.Query(year, officeCode, project, fromDate, toDate, broodstock, hatchwild, speciesid, archive)
+	if err != nil {
+		return geneticSummariesWithCount, err
+	}
+	defer rows.Close()
+
+	for rows.Next() {
+		summary := models.GeneticSummary{}
+		err = rows.Scan(&summary.Year, &summary.FieldOffice, &summary.Project, &summary.GeneticsVialNumber,
+			&summary.PitTag, &summary.River, &summary.RiverMile, &summary.State, &summary.SetDate, &summary.Broodstock,
+			&summary.HatchWild, &summary.SpeciesID, &summary.Archive)
+		if err != nil {
+			return geneticSummariesWithCount, err
+		}
+		geneticSummaries = append(geneticSummaries, summary)
+	}
+
+	geneticSummariesWithCount.Items = geneticSummaries
+
+	return geneticSummariesWithCount, err
+}
+
+var searchDataSummaryFullDataSql = `SELECT * FROM table (pallid_data_api.search_datasummary_fnc(:1,:2,:3,:4,:5,:6,:7,to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))`
+
+func (s *PallidSturgeonStore) GetFullSearchDataSummary(year string, officeCode string, project string, approved string, season string, segment string, month string, fromDate string, toDate string) (string, error) {
+	dbQuery, err := s.db.Prepare(searchDataSummaryFullDataSql)
+	if err != nil {
+		return "Cannot create file", err
+	}
+
+	rows, err := dbQuery.Query(year, officeCode, project, approved, season, segment, month, fromDate, toDate)
+	if err != nil {
+		return "Cannot create file", err
+	}
+	defer rows.Close()
+
+	cols, _ := rows.Columns()
+
+	file, err := os.Create("SearchDataSummary.csv")
+	if err != nil {
+		log.Fatal("Cannot create file", err)
+	}
+
+	defer file.Close()
+
+	writer := csv.NewWriter(file)
+	defer writer.Flush()
+
+	//save header
+	data := make([]string, 0)
+	data = append(data, cols...)
+ err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) + + data := make([]string, 0) + + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var searchDataSummarySql = `SELECT year,fieldoffice,project_id,segment_id,season,se_id,search_date,recorder,search_type_code,start_time,start_latitude,start_longitude,stop_time,stop_latitude,stop_longitude,temp,conductivity,bend,bend_river_mile,bendrn,site_id,checkby,search_day +FROM table (pallid_data_api.search_datasummary_fnc(:1,:2,:3,:4,:5,:6,:7,to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +var searchDataSummaryCountSql = `SELECT count(*) FROM table (pallid_data_api.search_datasummary_fnc(:1,:2,:3,:4,:5,:6,:7,to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetSearchDataSummary(year string, officeCode string, project string, approved string, season string, segment string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.SearchSummaryWithCount, error) { + searchSummariesWithCount := models.SearchSummaryWithCount{} + query := "" + queryWithCount := "" + + if officeCode == "ZZ" { + query = searchDataSummarySql + queryWithCount = searchDataSummaryCountSql + } + + if (officeCode == "MR") || (officeCode == "MT") { + query = searchDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_ID <= 4") + queryWithCount = searchDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_ID <= 4") + } + + if (officeCode == "SD") || (officeCode == "NE") || (officeCode == "MO") || (officeCode 
== "IA") || (officeCode == "KC") || (officeCode == "MI") { + query = searchDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_ID >= 7") + queryWithCount = searchDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_ID >= 7") + } + + countQuery, err := s.db.Prepare(queryWithCount) + if err != nil { + return searchSummariesWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, segment, month, fromDate, toDate) + if err != nil { + return searchSummariesWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&searchSummariesWithCount.TotalCount) + if err != nil { + return searchSummariesWithCount, err + } + } + + searchSummaries := []models.SearchSummary{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "se_id" + } + searchDataSummarySqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(searchDataSummarySqlWithSearch) + if err != nil { + return searchSummariesWithCount, err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, segment, month, fromDate, toDate) + if err != nil { + return searchSummariesWithCount, err + } + defer rows.Close() + + for rows.Next() { + summary := models.SearchSummary{} + err = rows.Scan(&summary.Year, &summary.FieldOffice, &summary.Project, &summary.Segment, &summary.Season, &summary.SeID, &summary.SearchDate, &summary.Recorder, &summary.SearchTypeCode, &summary.StartTime, + &summary.StartLatitude, &summary.StartLongitude, &summary.StopTime, &summary.StopLatitude, &summary.StopLongitude, &summary.Temp, &summary.Conductivity, &summary.Bend, &summary.BendRiverMile, &summary.Bendrn, &summary.SiteID, &summary.Checkby, &summary.SearchDay) + if err != nil { + return searchSummariesWithCount, err + } + searchSummaries = 
append(searchSummaries, summary) + } + + defer rows.Close() + searchSummariesWithCount.Items = searchSummaries + + return searchSummariesWithCount, err +} + +var telemetryDataSummaryFullDataSql = `select * FROM table (pallid_data_api.telemetry_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFullTelemetryDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string) (string, error) { + dbQuery, err := s.db.Prepare(telemetryDataSummaryFullDataSql) + if err != nil { + return "Cannot create file", err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return "Cannot create file", err + } + defer rows.Close() + + cols, _ := rows.Columns() + + file, err := os.Create("TelemetryDataSummary.csv") + if err != nil { + log.Fatal("Cannot create file", err) + } + + defer file.Close() + + writer := csv.NewWriter(file) + defer writer.Flush() + + //save header + data := make([]string, 0) + data = append(data, cols...) + err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) 
+ + data := make([]string, 0) + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var telemetryDataSummarySql = `select t_id, year,field_office_code,project_code,segment_code,season_code,bend_number,radio_tag_num,frequency_id,capture_time, capture_latitude, capture_longitude, position_confidence, macro_code, meso_code, depth, conductivity, turbidity, se_id, site_id, se.search_date, se.search_day, temp, silt, sand, gravel, comments +FROM table (pallid_data_api.telemetry_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY'))) func +inner join ds_search se on se.se_id = func.se_id` + +var telemetryDataSummaryCountSql = `select count(*) FROM table (pallid_data_api.telemetry_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetTelemetryDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.TelemetrySummaryWithCount, error) { + telemetrySummaryWithCount := models.TelemetrySummaryWithCount{} + query := "" + queryWithCount := "" + + if officeCode == "ZZ" { + query = telemetryDataSummarySql + queryWithCount = telemetryDataSummaryCountSql + } + + if (officeCode == "MR") || (officeCode == "MT") { + query = telemetryDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_CODE <= 4") + queryWithCount = telemetryDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_CODE <= 4") + } + + if (officeCode == "SD") || (officeCode == "NE") || (officeCode == "MO") || (officeCode == "IA") || (officeCode == "KC") || (officeCode == "MI") { + query = telemetryDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_CODE >= 
7") + queryWithCount = telemetryDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_CODE >= 7") + } + + countQuery, err := s.db.Prepare(queryWithCount) + if err != nil { + return telemetrySummaryWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return telemetrySummaryWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&telemetrySummaryWithCount.TotalCount) + if err != nil { + return telemetrySummaryWithCount, err + } + } + + telemetrySummaries := []models.TelemetrySummary{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "t_id" + } + telemetryDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + + dbQuery, err := s.db.Prepare(telemetryDataEntriesSqlWithSearch) + if err != nil { + return telemetrySummaryWithCount, err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return telemetrySummaryWithCount, err + } + defer rows.Close() + + for rows.Next() { + summary := models.TelemetrySummary{} + err = rows.Scan(&summary.TId, + &summary.Year, + &summary.FieldOffice, + &summary.Project, + &summary.Segment, + &summary.Season, + &summary.Bend, + &summary.RadioTagNum, + &summary.FrequencyIdCode, + &summary.CaptureTime, + &summary.CaptureLatitude, + &summary.CaptureLongitude, + &summary.PositionConfidence, + &summary.MacroId, + &summary.MesoId, + &summary.Depth, + &summary.Conductivity, + &summary.Turbidity, + &summary.SeId, + &summary.SiteID, + &summary.SearchDate, + &summary.SearchDay, + &summary.Temp, + &summary.Silt, + &summary.Sand, + &summary.Gravel, + &summary.Comments) + if err != nil { + return telemetrySummaryWithCount, err + } + telemetrySummaries = 
append(telemetrySummaries, summary) + } + + telemetrySummaryWithCount.Items = telemetrySummaries + + return telemetrySummaryWithCount, err +} + +var procedureDataSummaryFullDataSql = `select * FROM table (pallid_data_api.procedure_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetFullProcedureDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string) (string, error) { + dbQuery, err := s.db.Prepare(procedureDataSummaryFullDataSql) + if err != nil { + return "Cannot create file", err + } + + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return "Cannot create file", err + } + defer rows.Close() + + cols, _ := rows.Columns() + + file, err := os.Create("ProcedureDataSummary.csv") + if err != nil { + log.Fatal("Cannot create file", err) + } + + defer file.Close() + + writer := csv.NewWriter(file) + defer writer.Flush() + + //save header + data := make([]string, 0) + data = append(data, cols...) + err = writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + + for rows.Next() { + + columns := make([]interface{}, len(cols)) + columnPointers := make([]interface{}, len(cols)) + for i := range columns { + columnPointers[i] = &columns[i] + } + + rows.Scan(columnPointers...) 
+ + data := make([]string, 0) + + for i := range cols { + var v string + val := columns[i] + + if val == nil { + v = "" + } else { + v = fmt.Sprintf("%v", val) + } + data = append(data, v) + } + + err := writer.Write(data) + if err != nil { + log.Fatal("Cannot write to file", err) + } + } + + return file.Name(), err +} + +var procedureDataSummarySql = `select pid_display, mr_id, year, field_office_code, project_code, segment_code, season_code, purpose_code, new_radio_tag_num, new_frequency_id, spawn_code, expected_spawn_year, bend_number, bend_r_or_n, bend_river_mile,mr_id FROM table (pallid_data_api.procedure_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +var procedureDataSummaryCountSql = `select count(*) FROM table (pallid_data_api.procedure_datasummary_fnc(:1, :2, :3, :4, :5, :6, :7, to_date(:8,'MM/DD/YYYY'), to_date(:9,'MM/DD/YYYY')))` + +func (s *PallidSturgeonStore) GetProcedureDataSummary(year string, officeCode string, project string, approved string, season string, spice string, month string, fromDate string, toDate string, queryParams models.SearchParams) (models.ProcedureSummaryWithCount, error) { + procedureSummaryWithCount := models.ProcedureSummaryWithCount{} + query := "" + queryWithCount := "" + + if officeCode == "ZZ" { + query = procedureDataSummarySql + queryWithCount = procedureDataSummaryCountSql + } + + if (officeCode == "MR") || (officeCode == "MT") { + query = procedureDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_CODE <= 4") + queryWithCount = procedureDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_CODE <= 4") + } + + if (officeCode == "SD") || (officeCode == "NE") || (officeCode == "MO") || (officeCode == "IA") || (officeCode == "KC") || (officeCode == "MI") { + query = procedureDataSummarySql + fmt.Sprintln(" WHERE SEGMENT_CODE >= 7") + queryWithCount = procedureDataSummaryCountSql + fmt.Sprintln(" WHERE SEGMENT_CODE >= 7") + } + + countQuery, err := s.db.Prepare(queryWithCount) + 
if err != nil { + return procedureSummaryWithCount, err + } + + countrows, err := countQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return procedureSummaryWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&procedureSummaryWithCount.TotalCount) + if err != nil { + return procedureSummaryWithCount, err + } + } -var seasonsSql = "select * from season_lk order by id" + procedureSummaries := []models.ProcedureSummary{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "pid_display" + } + procedureDataEntriesSqlWithSearch := query + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(procedureDataEntriesSqlWithSearch) + if err != nil { + return procedureSummaryWithCount, err + } -func (s *PallidSturgeonStore) GetSeasons() ([]models.Season, error) { - rows, err := s.db.Query(seasonsSql) + rows, err := dbQuery.Query(year, officeCode, project, approved, season, spice, month, fromDate, toDate) + if err != nil { + return procedureSummaryWithCount, err + } + defer rows.Close() - seasons := []models.Season{} for rows.Next() { - season := models.Season{} - err = rows.Scan(&season.ID, &season.Code, &season.Description, &season.FieldAppFlag, &season.ProjectCode) + summary := models.ProcedureSummary{} + err = rows.Scan(&summary.ID, + &summary.UniqueID, + &summary.Year, + &summary.FieldOffice, + &summary.Project, + &summary.Segment, + &summary.Season, + &summary.PurposeCode, + &summary.NewRadioTagNum, + &summary.NewFrequencyId, + &summary.SpawnCode, + &summary.ExpectedSpawnYear, + &summary.Bend, + &summary.Bendrn, + &summary.BendRiverMile, + &summary.UniqueID) if err != nil { - return nil, err + return procedureSummaryWithCount, err } - seasons = append(seasons, season) + procedureSummaries = 
append(procedureSummaries, summary) } - return seasons, err + procedureSummaryWithCount.Items = procedureSummaries + + return procedureSummaryWithCount, err +} + +var missouriDatasheetsBySiteId = `select site_id, mr_id, mr_fid, subsample, subsamplepass, subsamplen, recorder, conductivity, bkg_color, fish_count, supp_count, supp_bkg_color, setdate, proc_count, proc_bkg_color from table (pallid_data_entry_api.data_entry_missouri_fnc(:1,:2,:3,:4,:5,:6))` + +var missouriDatasheetsCountBySiteId = `select count(*) from table (pallid_data_entry_api.data_entry_missouri_fnc(:1,:2,:3,:4,:5,:6))` + +func (s *PallidSturgeonStore) GetMissouriDatasheetById(siteId string, officeCode string, project string, segment string, season string, bend string, queryParams models.SearchParams) (models.MoriverDataEntryWithCount, error) { + missouriDatasheetsWithCount := models.MoriverDataEntryWithCount{} + countQuery, err := s.db.Prepare(missouriDatasheetsCountBySiteId) + if err != nil { + return missouriDatasheetsWithCount, err + } + + countrows, err := countQuery.Query(siteId, officeCode, project, segment, season, bend) + if err != nil { + return missouriDatasheetsWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&missouriDatasheetsWithCount.TotalCount) + if err != nil { + return missouriDatasheetsWithCount, err + } + } + + missouriDatasheets := []models.UploadMoriver{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "site_id" + } + missouriDataByIdSqlWithSearch := missouriDatasheetsBySiteId + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(missouriDataByIdSqlWithSearch) + if err != nil { + return missouriDatasheetsWithCount, err + } + + rows, err := dbQuery.Query(siteId, officeCode, project, segment, season, bend) + if err != nil { + return 
missouriDatasheetsWithCount, err + } + defer rows.Close() + + for rows.Next() { + datasheets := models.UploadMoriver{} + err = rows.Scan(&datasheets.SiteID, &datasheets.MrID, &datasheets.MrFid, &datasheets.Subsample, &datasheets.Subsamplepass, &datasheets.Subsamplen, &datasheets.Recorder, &datasheets.Conductivity, &datasheets.BkgColor, + &datasheets.FishCount, &datasheets.SuppCount, &datasheets.SuppBkgColor, &datasheets.SetDate, &datasheets.ProcCount, &datasheets.ProcBkgColor) + if err != nil { + return missouriDatasheetsWithCount, err + } + missouriDatasheets = append(missouriDatasheets, datasheets) + } + + missouriDatasheetsWithCount.Items = missouriDatasheets + + return missouriDatasheetsWithCount, err +} + +var searchDatasheetsBySiteId = `select si.site_id, se.se_id, se.recorder, se.search_type_code, se.start_time, se.start_latitude, se.start_longitude, se.stop_time, se.stop_latitude, se.stop_longitude, +se.temp, se.conductivity +, (select count(t.t_id) + from ds_sites s, ds_search sea, ds_telemetry_fish t + where s.site_id = sea.site_id + and t.se_id = sea.se_id + and si.site_id = s.site_id + and sea.se_id =se.se_id) as telemetry_count +,(CASE WHEN (select count(t.t_id) + from ds_sites s, ds_search sea, ds_telemetry_fish t + where s.site_id = sea.site_id + and t.se_id = sea.se_id + and si.site_id = s.site_id + and sea.se_id =se.se_id) > 0 THEN '#DAF2EA' + ELSE NULL END) as bkg_color +from ds_sites si inner join ds_search se on si.site_id = se.site_id +where si.site_id = :1` + +var searchDatasheetsCountBySiteId = `select count(*) from ds_sites si inner join ds_search se on se.site_id = si.site_id where si.site_id = :1` + +func (s *PallidSturgeonStore) GetSearchDatasheetById(siteId string, queryParams models.SearchParams) (models.UploadSearchData, error) { + searchDatasheetsWithCount := models.UploadSearchData{} + countQuery, err := s.db.Prepare(searchDatasheetsCountBySiteId) + if err != nil { + return searchDatasheetsWithCount, err + } + + countrows, err := 
countQuery.Query(siteId) + if err != nil { + return searchDatasheetsWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&searchDatasheetsWithCount.TotalCount) + if err != nil { + return searchDatasheetsWithCount, err + } + } + + searchDatasheets := []models.UploadSearch{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "se_id" + } + sqlQueryWithSearch := searchDatasheetsBySiteId + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(sqlQueryWithSearch) + if err != nil { + return searchDatasheetsWithCount, err + } + + rows, err := dbQuery.Query(siteId) + if err != nil { + return searchDatasheetsWithCount, err + } + defer rows.Close() + + for rows.Next() { + datasheets := models.UploadSearch{} + err = rows.Scan(&datasheets.SiteId, &datasheets.SeId, &datasheets.Recorder, &datasheets.SearchTypeCode, &datasheets.StartTime, &datasheets.StartLatitude, &datasheets.StartLongitude, &datasheets.StopTime, + &datasheets.StopLatitude, &datasheets.StopLongitude, &datasheets.Temp, &datasheets.Conductivity, &datasheets.TelemetryCount, &datasheets.BkgColor) + if err != nil { + return searchDatasheetsWithCount, err + } + searchDatasheets = append(searchDatasheets, datasheets) + } + + searchDatasheetsWithCount.Items = searchDatasheets + + return searchDatasheetsWithCount, err +} + +var nextUploadSessionIdSql = `SELECT upload_session_seq.nextval from dual` + +func (s *PallidSturgeonStore) GetUploadSessionId() (int, error) { + rows, err := s.db.Query(nextUploadSessionIdSql) + if err != nil { + return 0, err + } + defer rows.Close() + + var nextUploadSessionId int + for rows.Next() { + rows.Scan(&nextUploadSessionId) + } + + return nextUploadSessionId, err } -var insertUploadSiteSql = `insert into upload_site (site_id, site_fid, site_year, fieldoffice_id, +var 
insertUploadSiteSql = `insert into upload_sites (site_id, site_fid, site_year, fieldoffice_id, field_office, project_id, project, - segment_id, segment, season_id, season, bend, bendrn, bend_river_mile, comments, - last_updated, upload_session_id, uploaded_by, upload_filename) + segment_id, segment, season_id, season, bend, bendrn, bend_river_mile, comments, last_updated, upload_session_id, uploaded_by, upload_filename) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19)` func (s *PallidSturgeonStore) SaveSiteUpload(uploadSite models.UploadSite) error { @@ -63,25 +3215,6 @@ func (s *PallidSturgeonStore) SaveSiteUpload(uploadSite models.UploadSite) error return err } -func (s *PallidSturgeonStore) UploadSiteDatasheetCheck(uploadedBy string, uploadSessionId int) error { - uploadSiteStmt, err := s.db.Prepare("begin DATA_UPLOAD.uploadSiteDatasheetCheck (:1,:2); end;") - - //var retVal string - uploadSiteStmt.Exec(godror.PlSQLArrays, uploadedBy, uploadSessionId) - - //fmt.Println(retVal) - - return err -} - -func (s *PallidSturgeonStore) UploadSiteDatasheet(uploadedBy string) error { - uploadSiteStmt, err := s.db.Prepare("begin DATA_UPLOAD.uploadSiteDatasheet (:1); end;") - - uploadSiteStmt.Exec(godror.PlSQLArrays, uploadedBy) - - return err -} - var insertFishUploadSql = `insert into upload_fish (site_id, f_fid, mr_fid, panelhook, bait, species, length, weight, fishcount, fin_curl, otolith, rayspine, scale, ftprefix, ftnum, ftmr, comments, last_updated, upload_session_id, uploaded_by, upload_filename) @@ -90,7 +3223,7 @@ var insertFishUploadSql = `insert into upload_fish (site_id, f_fid, mr_fid, pane func (s *PallidSturgeonStore) SaveFishUpload(uploadFish models.UploadFish) error { _, err := s.db.Exec(insertFishUploadSql, uploadFish.SiteID, - uploadFish.FFid, + uploadFish.Ffid, uploadFish.MrFid, uploadFish.Panelhook, uploadFish.Bait, @@ -115,25 +3248,6 @@ func (s *PallidSturgeonStore) SaveFishUpload(uploadFish models.UploadFish) error return err 
} -func (s *PallidSturgeonStore) UploadFishDatasheetCheck(uploadedBy string, uploadSessionId int) error { - uploadSiteStmt, err := s.db.Prepare("begin DATA_UPLOAD.uploadFishDatasheetCheck (:1,:2); end;") - - //var retVal string - uploadSiteStmt.Exec(godror.PlSQLArrays, uploadedBy, uploadSessionId) - - //fmt.Println(retVal) - - return err -} - -func (s *PallidSturgeonStore) UploadFishDatasheet(uploadedBy string) error { - uploadSiteStmt, err := s.db.Prepare("begin DATA_UPLOAD.uploadFishDatasheet (:1); end;") - - uploadSiteStmt.Exec(godror.PlSQLArrays, uploadedBy) - - return err -} - var insertSearchUploadSql = `insert into upload_search(se_fid, ds_id, site_id, site_fid, search_date, recorder, search_type_code, search_day, start_time, start_latitude, start_longitude, stop_time, stop_latitude, stop_longitude, temp, conductivity, last_updated, upload_session_id, uploaded_by, upload_filename) @@ -143,7 +3257,8 @@ func (s *PallidSturgeonStore) SaveSearchUpload(uploadSearch models.UploadSearch) _, err := s.db.Exec(insertSearchUploadSql, uploadSearch.SeFid, uploadSearch.DsId, - uploadSearch.SiteID, + uploadSearch.SiteId, + uploadSearch.SiteFid, uploadSearch.SearchDate, uploadSearch.Recorder, uploadSearch.SearchTypeCode, @@ -165,20 +3280,18 @@ func (s *PallidSturgeonStore) SaveSearchUpload(uploadSearch models.UploadSearch) return err } -var insertSupplementalUploadSql = `insert into upload_supplemental (site_id, f_fid, mr_fid, - tagnumber, pitrn, - scuteloc, scutenum, scuteloc2, scutenum2, +var insertSupplementalUploadSql = `insert into upload_supplemental (site_id, f_fid, mr_fid, + tagnumber, pitrn, + scuteloc, scutenum, scuteloc2, scutenum2, elhv, elcolor, erhv, ercolor, cwtyn, dangler, genetic, genetics_vial_number, - broodstock, hatch_wild, species_id, archive, + broodstock, hatch_wild, species_id, archive, head, snouttomouth, inter, mouthwidth, m_ib, - l_ob, l_ib, r_ib, - r_ob, anal, dorsal, status, hatchery_origin, - sex, stage, recapture, photo, + l_ob, l_ib, r_ib, + 
r_ob, anal, dorsal, status, hatchery_origin, + sex, stage, recapture, photo, genetic_needs, other_tag_info, - comments, - last_updated, upload_session_id,uploaded_by, upload_filename) - - values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20, + comments, + last_updated, upload_session_id, uploaded_by, upload_filename) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20, :21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35,:36,:37,:38,:39,:40,:41,:42,:43,:44,:45)` func (s *PallidSturgeonStore) SaveSupplementalUpload(uploadSupplemental models.UploadSupplemental) error { @@ -221,6 +3334,8 @@ func (s *PallidSturgeonStore) SaveSupplementalUpload(uploadSupplemental models.U uploadSupplemental.Stage, uploadSupplemental.Recapture, uploadSupplemental.Photo, + uploadSupplemental.GeneticNeeds, + uploadSupplemental.OtherTagInfo, uploadSupplemental.Comments, uploadSupplemental.LastUpdated, uploadSupplemental.UploadSessionId, @@ -231,21 +3346,20 @@ func (s *PallidSturgeonStore) SaveSupplementalUpload(uploadSupplemental models.U return err } -var insertProcedureUploadSql = `insert into upload_procedure (id, f_fid, purpose_code, procedure_date, procedure_start_time, procedure_end_time, procedure_by, +var insertProcedureUploadSql = `insert into upload_procedure (f_fid, mr_fid, purpose_code, procedure_date, procedure_start_time, procedure_end_time, procedure_by, antibiotic_injection_ind, photo_dorsal_ind, photo_ventral_ind, photo_left_ind, old_radio_tag_num, old_frequency_id, dst_serial_num, dst_start_date, dst_start_time, dst_reimplant_ind, new_radio_tag_num, new_frequency_id, sex_code, blood_sample_ind, egg_sample_ind, comments, fish_health_comments, eval_location_code, spawn_code, visual_repro_status_code, ultrasound_repro_status_code, - expected_spawn_year, ultrasound_gonad_length, gonad_condition, - last_updated, upload_session_id, uploaded_by, upload_filename ) + expected_spawn_year, ultrasound_gonad_length, 
gonad_condition, last_updated, upload_session_id, uploaded_by, upload_filename) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24,:25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35)` func (s *PallidSturgeonStore) SaveProcedureUpload(uploadProcedure models.UploadProcedure) error { _, err := s.db.Exec(insertProcedureUploadSql, - uploadProcedure.Id, uploadProcedure.FFid, + uploadProcedure.MrFid, uploadProcedure.PurposeCode, - uploadProcedure.ProcedurDate, + uploadProcedure.ProcedureDate, uploadProcedure.ProcedureStartTime, uploadProcedure.ProcedureEndTime, uploadProcedure.ProcedureBy, @@ -267,7 +3381,7 @@ func (s *PallidSturgeonStore) SaveProcedureUpload(uploadProcedure models.UploadP uploadProcedure.Comments, uploadProcedure.FishHealthComments, uploadProcedure.EvalLocationCode, - uploadProcedure.SpawnCode, + uploadProcedure.SpawnStatus, uploadProcedure.VisualReproStatusCode, uploadProcedure.UltrasoundReproStatusCode, uploadProcedure.ExpectedSpawnYear, @@ -282,7 +3396,7 @@ func (s *PallidSturgeonStore) SaveProcedureUpload(uploadProcedure models.UploadP return err } -var insertMrUploadSql = `insert into upload_mr (site_id, site_fid, mr_fid, season, setdate, subsample, subsamplepass, +var insertMoriverUploadSql = `insert into upload_mr (site_id, site_fid, mr_fid, season, setdate, subsample, subsamplepass, subsamplen, recorder, gear, gear_type, temp, turbidity, conductivity, do, distance, width, netrivermile, structurenumber, usgs, riverstage, discharge, u1, u2, u3, u4, u5, u6, u7, macro, meso, habitatrn, qc, @@ -293,66 +3407,609 @@ var insertMrUploadSql = `insert into upload_mr (site_id, site_fid, mr_fid, seaso depth3, velocitybot3, velocity08_3, velocity02or06_3, watervel, cobble, organic, silt, sand, gravel, comments, last_updated, upload_session_id, - uploaded_by, upload_filename, complete, checkby, + uploaded_by, upload_filename, complete, no_turbidity, no_velocity) values 
(:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23,:24, :25,:26,:27,:28,:29,:30,:31,:32,:33,:34,:35,:36,:37,:38,:39,:40,:41,:42,:43,:44,:45,:46,:47, :48,:49,:50,:51,:52,:53,:54,:55,:56,:57,:58,:59,:60,:61,:62,:63,:64,:65,:66,:67,:68,:69,:70, - :71,:72)` - -func (s *PallidSturgeonStore) SaveMrUpload(uploadMr models.UploadMr) error { - _, err := s.db.Exec(insertMrUploadSql, - uploadMr.SiteID, uploadMr.SiteFid, uploadMr.MrFid, uploadMr.Season, uploadMr.Setdate, - uploadMr.Subsample, uploadMr.Subsamplepass, uploadMr.Subsamplen, uploadMr.Recorder, - uploadMr.Gear, uploadMr.GearType, uploadMr.Temp, uploadMr.Turbidity, uploadMr.Conductivity, - uploadMr.Do, uploadMr.Distance, uploadMr.Width, uploadMr.Netrivermile, uploadMr.Structurenumber, - uploadMr.Usgs, uploadMr.Riverstage, uploadMr.Discharge, uploadMr.U1, uploadMr.U2, uploadMr.U3, uploadMr.U4, - uploadMr.U5, uploadMr.U6, uploadMr.U7, uploadMr.Macro, uploadMr.Meso, uploadMr.Habitatrn, uploadMr.Qc, - uploadMr.MicroStructure, uploadMr.StructureFlow, uploadMr.StructureMod, uploadMr.SetSite1, uploadMr.SetSite2, uploadMr.SetSite3, - uploadMr.StartTime, uploadMr.StartLatitude, uploadMr.StartLongitude, uploadMr.StopTime, uploadMr.StopLatitude, uploadMr.StopLongitude, - uploadMr.Depth1, uploadMr.Velocitybot1, uploadMr.Velocity08_1, uploadMr.Velocity02or06_1, - uploadMr.Depth2, uploadMr.Velocitybot2, uploadMr.Velocity08_2, uploadMr.Velocity02or06_2, - uploadMr.Depth3, uploadMr.Velocitybot3, uploadMr.Velocity08_3, uploadMr.Velocity02or06_3, - uploadMr.Watervel, uploadMr.Cobble, uploadMr.Organic, uploadMr.Silt, uploadMr.Sand, uploadMr.Gravel, - uploadMr.Comments, uploadMr.LastUpdated, uploadMr.UploadSessionId, - uploadMr.UploadedBy, uploadMr.UploadFilename, uploadMr.Complete, uploadMr.Checkby, - uploadMr.NoTurbidity, uploadMr.NoVelocity, + :71)` + +func (s *PallidSturgeonStore) SaveMoriverUpload(UploadMoriver models.UploadMoriver) error { + _, err := s.db.Exec(insertMoriverUploadSql, + 
UploadMoriver.SiteID, UploadMoriver.SiteFid, UploadMoriver.MrFid, UploadMoriver.Season, UploadMoriver.SetDate, + UploadMoriver.Subsample, UploadMoriver.Subsamplepass, UploadMoriver.Subsamplen, UploadMoriver.Recorder, + UploadMoriver.Gear, UploadMoriver.GearType, UploadMoriver.Temp, UploadMoriver.Turbidity, UploadMoriver.Conductivity, + UploadMoriver.Do, UploadMoriver.Distance, UploadMoriver.Width, UploadMoriver.Netrivermile, UploadMoriver.Structurenumber, + UploadMoriver.Usgs, UploadMoriver.Riverstage, UploadMoriver.Discharge, UploadMoriver.U1, UploadMoriver.U2, UploadMoriver.U3, UploadMoriver.U4, + UploadMoriver.U5, UploadMoriver.U6, UploadMoriver.U7, UploadMoriver.Macro, UploadMoriver.Meso, UploadMoriver.Habitatrn, UploadMoriver.Qc, + UploadMoriver.MicroStructure, UploadMoriver.StructureFlow, UploadMoriver.StructureMod, UploadMoriver.SetSite1, UploadMoriver.SetSite2, UploadMoriver.SetSite3, + UploadMoriver.StartTime, UploadMoriver.StartLatitude, UploadMoriver.StartLongitude, UploadMoriver.StopTime, UploadMoriver.StopLatitude, UploadMoriver.StopLongitude, + UploadMoriver.Depth1, UploadMoriver.Velocitybot1, UploadMoriver.Velocity08_1, UploadMoriver.Velocity02or06_1, + UploadMoriver.Depth2, UploadMoriver.Velocitybot2, UploadMoriver.Velocity08_2, UploadMoriver.Velocity02or06_2, + UploadMoriver.Depth3, UploadMoriver.Velocitybot3, UploadMoriver.Velocity08_3, UploadMoriver.Velocity02or06_3, + UploadMoriver.Watervel, UploadMoriver.Cobble, UploadMoriver.Organic, UploadMoriver.Silt, UploadMoriver.Sand, UploadMoriver.Gravel, + UploadMoriver.Comments, UploadMoriver.LastUpdated, UploadMoriver.UploadSessionId, + UploadMoriver.UploadedBy, UploadMoriver.UploadFilename, UploadMoriver.Complete, + UploadMoriver.NoTurbidity, UploadMoriver.NoVelocity, ) return err } -var insertTelemetryFishUploadSql = `insert into upload_telemetry_fish(t_fid, se_fid, bend, radio_tag_num, frequency_id_code, capture_time, capture_latitude, capture_longitude, +var insertTelemetryUploadSql = `insert into 
upload_telemetry_fish(t_fid, se_fid, bend, radio_tag_num, frequency_id_code, capture_time, capture_latitude, capture_longitude, position_confidence, macro_id, meso_id, depth, temp, conductivity, turbidity, silt, sand, gravel, comments, last_updated, upload_session_id, uploaded_by, upload_filename) values (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17,:18,:19,:20,:21,:22,:23)` -func (s *PallidSturgeonStore) SaveTelemetryFishUpload(uploadTelemetryFish models.UploadTelemetryFish) error { - _, err := s.db.Exec(insertTelemetryFishUploadSql, - uploadTelemetryFish.TFid, - uploadTelemetryFish.SeFid, - uploadTelemetryFish.Bend, - uploadTelemetryFish.RadioTagNum, - uploadTelemetryFish.FrequencyIdCode, - uploadTelemetryFish.CaptureTime, - uploadTelemetryFish.CaptureLatitude, - uploadTelemetryFish.CaptureLongitude, - uploadTelemetryFish.PositionConfidence, - uploadTelemetryFish.MacroId, - uploadTelemetryFish.MesoId, - uploadTelemetryFish.Depth, - uploadTelemetryFish.Temp, - uploadTelemetryFish.Conductivity, - uploadTelemetryFish.Turbidity, - uploadTelemetryFish.Silt, - uploadTelemetryFish.Sand, - uploadTelemetryFish.Gravel, - uploadTelemetryFish.Comments, - uploadTelemetryFish.LastUpdated, - uploadTelemetryFish.UploadSessionId, - uploadTelemetryFish.UploadedBy, - uploadTelemetryFish.UploadFilename, +func (s *PallidSturgeonStore) SaveTelemetryUpload(uploadTelemetry models.UploadTelemetry) error { + _, err := s.db.Exec(insertTelemetryUploadSql, + uploadTelemetry.TFid, + uploadTelemetry.SeFid, + uploadTelemetry.Bend, + uploadTelemetry.RadioTagNum, + uploadTelemetry.FrequencyIdCode, + uploadTelemetry.CaptureTime, + uploadTelemetry.CaptureLatitude, + uploadTelemetry.CaptureLongitude, + uploadTelemetry.PositionConfidence, + uploadTelemetry.MacroId, + uploadTelemetry.MesoId, + uploadTelemetry.Depth, + uploadTelemetry.Temp, + uploadTelemetry.Conductivity, + uploadTelemetry.Turbidity, + uploadTelemetry.Silt, + uploadTelemetry.Sand, + uploadTelemetry.Gravel, + 
uploadTelemetry.Comments, + uploadTelemetry.LastUpdated, + uploadTelemetry.UploadSessionId, + uploadTelemetry.UploadedBy, + uploadTelemetry.UploadFilename, ) return err } + +func (s *PallidSturgeonStore) CallStoreProcedures(uploadedBy string, uploadSessionId int) (models.ProcedureOut, error) { + + procedureOut := models.ProcedureOut{} + + uploadFishStmt, err := s.db.Prepare("begin PALLID_DATA_UPLOAD.uploadFinal (:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12); end;") + if err != nil { + return procedureOut, err + } + + var p_site_cnt_final int + var p_mr_cnt_final int + var p_fishCntFinal int + var p_searchCntFinal int + var p_suppCntFinal int + var p_telemetryCntFinal int + var p_procedureCntFinal int + var p_noSite_cnt int + var p_siteMatch int + var p_noSiteID_msg string + + _, err = uploadFishStmt.Exec(godror.PlSQLArrays, uploadedBy, sql.Out{Dest: &p_site_cnt_final}, sql.Out{Dest: &p_mr_cnt_final}, sql.Out{Dest: &p_fishCntFinal}, sql.Out{Dest: &p_searchCntFinal}, sql.Out{Dest: &p_suppCntFinal}, sql.Out{Dest: &p_telemetryCntFinal}, sql.Out{Dest: &p_procedureCntFinal}, sql.Out{Dest: &p_noSite_cnt}, sql.Out{Dest: &p_siteMatch}, sql.Out{Dest: &p_noSiteID_msg}, uploadSessionId) + if err != nil { + return procedureOut, err + } + + uploadFishStmt.Close() + + procedureOut.UploadSessionId = uploadSessionId + procedureOut.UploadedBy = uploadedBy + procedureOut.SiteCntFinal = p_site_cnt_final + procedureOut.MrCntFinal = p_mr_cnt_final + procedureOut.FishCntFinal = p_fishCntFinal + procedureOut.SearchCntFinal = p_searchCntFinal + procedureOut.SuppCntFinal = p_suppCntFinal + procedureOut.TelemetryCntFinal = p_telemetryCntFinal + procedureOut.ProcedureCntFinal = p_procedureCntFinal + procedureOut.NoSiteCnt = p_noSite_cnt + procedureOut.SiteMatch = p_siteMatch + procedureOut.NoSiteIDMsg = p_noSiteID_msg + + return procedureOut, err +} + +var errorCountSql = `select el.year, count(el.el_id) +from site_error_log_v el +where NVL(error_fixed,0) = 0 +and (case +when el.worksheet_type_id = 
2 +then (select FIELDOFFICE +from ds_sites +where site_id = (select site_id +from ds_moriver +where mr_id = el.worksheet_id)) +when el.worksheet_type_id = 1 +then NULL +when el.worksheet_type_id in(3,4) +then (select FIELDOFFICE +from ds_sites +where site_id = (select mr2.site_id +from ds_sites s2, ds_moriver mr2, ds_fish f2 +where s2.site_id = mr2.site_id +and mr2.mr_id = F2.MR_ID +and f2.f_id = el.worksheet_id)) +end) = :1 +group by el.year +Order By el.year desc` + +func (s *PallidSturgeonStore) GetErrorCount(fieldOfficeCode string) ([]models.ErrorCount, error) { + errorCounts := []models.ErrorCount{} + + selectQuery, err := s.db.Prepare(errorCountSql) + if err != nil { + return errorCounts, err + } + + rows, err := selectQuery.Query(fieldOfficeCode) + if err != nil { + return errorCounts, err + } + defer rows.Close() + + for rows.Next() { + errorCount := models.ErrorCount{} + err = rows.Scan(&errorCount.Year, &errorCount.Count) + if err != nil { + return errorCounts, err + } + errorCounts = append(errorCounts, errorCount) + } + + return errorCounts, err +} + +var getOfficeErrorLogSql = `select el.site_id, el.year, el.el_id, el.error_entry_date, el.worksheet_type_id, el.field_id, el.error_description, COALESCE(el.error_fixed, 0) as error_fixed, el.worksheet_id, el.form_id +from site_error_log_v el +where (case +when el.worksheet_type_id = 2 +then (select FIELDOFFICE +from ds_sites +where site_id = (select site_id +from ds_moriver +where mr_id = el.worksheet_id)) +when el.worksheet_type_id = 1 +then NULL +when el.worksheet_type_id in(3,4) +then (select FIELDOFFICE +from ds_sites +where site_id = (select mr2.site_id +from ds_sites s2, ds_moriver mr2, ds_fish f2 +where s2.site_id = mr2.site_id +and mr2.mr_id = F2.MR_ID +and f2.f_id = el.worksheet_id)) +end) = :1` + +func (s *PallidSturgeonStore) GetOfficeErrorLogs(fieldOfficeCode string) ([]models.OfficeErrorLog, error) { + officeErrorLogs := []models.OfficeErrorLog{} + + rows, err := 
s.db.Query(getOfficeErrorLogSql, fieldOfficeCode) + if err != nil { + return officeErrorLogs, err + } + defer rows.Close() + + for rows.Next() { + officeErrorLog := models.OfficeErrorLog{} + err = rows.Scan(&officeErrorLog.SiteID, &officeErrorLog.Year, &officeErrorLog.ElID, &officeErrorLog.ErrorEntryDate, &officeErrorLog.WorksheetTypeID, &officeErrorLog.FieldID, + &officeErrorLog.ErrorDescription, &officeErrorLog.ErrorStatus, &officeErrorLog.WorksheetID, &officeErrorLog.FormID) + if err != nil { + return officeErrorLogs, err + } + officeErrorLogs = append(officeErrorLogs, officeErrorLog) + } + + return officeErrorLogs, err +} + +var usgNoVialNumberSql = `select fo.field_office_description||' : '||p.project_description as fp, +f.species, +f.f_id, mr.mr_id, MR.SITE_ID as mrsite_id, DS.SITE_ID as s_site_id, +f.f_fid, Sup.GENETICS_VIAL_NUMBER +from ds_fish f, ds_supplemental sup, ds_moriver mr, ds_sites ds, project_lk p, segment_lk s, field_office_lk fo +where F.F_ID = Sup.F_ID (+) +and MR.MR_ID = F.MR_ID (+) +and mr.site_id = ds.site_id (+) +and DS.PROJECT_ID = P.PROJECT_CODE (+) +and DS.FIELDOFFICE = fo.FIELD_OFFICE_CODE (+) +and ds.SEGMENT_ID = s.segment_code (+) +and (f.species = 'USG' or f.species = 'PDSG') +and Sup.GENETICS_VIAL_NUMBER IS NULL +and (CASE when :1 != 'ZZ' THEN ds.FIELDOFFICE ELSE :2 END) = :3 +and ds.PROJECT_ID = :4 +order by ds.FIELDOFFICE, ds.PROJECT_ID, ds.SEGMENT_ID, ds.BEND` + +func (s *PallidSturgeonStore) GetUsgNoVialNumbers(fieldOfficeCode string, projectCode string) ([]models.UsgNoVialNumber, error) { + usgNoVialNumbers := []models.UsgNoVialNumber{} + + rows, err := s.db.Query(usgNoVialNumberSql, fieldOfficeCode, fieldOfficeCode, fieldOfficeCode, projectCode) + if err != nil { + return usgNoVialNumbers, err + } + defer rows.Close() + + for rows.Next() { + usgNoVialNumber := models.UsgNoVialNumber{} + err = rows.Scan(&usgNoVialNumber.Fp, &usgNoVialNumber.SpeciesCode, &usgNoVialNumber.FID, &usgNoVialNumber.MrID, &usgNoVialNumber.MrsiteID, 
&usgNoVialNumber.SSiteID, &usgNoVialNumber.FFID, &usgNoVialNumber.GeneticsVialNumber) + if err != nil { + return usgNoVialNumbers, err + } + usgNoVialNumbers = append(usgNoVialNumbers, usgNoVialNumber) + } + + return usgNoVialNumbers, err +} + +var unapprovedDataSheetsSql = `select asv.ch, +f.field_office_description||' : '||p.project_description as fp, +s.segment_description, +COALESCE(m.bend, 0) as bend, +m.MR_ID, +m.SUBSAMPLE, +m.RECORDER, +m.CHECKBY, +COALESCE(m.NETRIVERMILE, 0) as NETRIVERMILE, +m.site_id, +ds.project_id, ds.segment_id, ds.season, ds.fieldoffice, +ds.sample_unit_type, +m.gear +from DS_MORIVER m, project_lk p, segment_lk s, field_office_lk f, approval_status_v asv, ds_sites ds +where m.site_id = ds.site_id (+) +and ds.segment_id = s.segment_code (+) +and DS.PROJECT_ID = P.PROJECT_CODE (+) +and DS.FIELDOFFICE = F.FIELD_OFFICE_CODE +and m.mr_id = asv.mr_id (+) +and asv.ch = 'Unapproved' +and asv.cb = 'YES' +and ds.project_id = :1 +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +var unapprovedDataSheetsCountSql = `select count(*) +from DS_MORIVER m, project_lk p, segment_lk s, field_office_lk f, approval_status_v asv, ds_sites ds +where m.site_id = ds.site_id (+) +and ds.segment_id = s.segment_code (+) +and DS.PROJECT_ID = P.PROJECT_CODE (+) +and DS.FIELDOFFICE = F.FIELD_OFFICE_CODE +and m.mr_id = asv.mr_id (+) +and asv.ch = 'Unapproved' +and asv.cb = 'YES' +and ds.project_id = :1 +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +func (s *PallidSturgeonStore) GetUnapprovedDataSheets(projectCode string, officeCode string, queryParams models.SearchParams) (models.UnapprovedDataWithCount, error) { + unapprovedDataSheetsWithCount := models.UnapprovedDataWithCount{} + countQuery, err := s.db.Prepare(unapprovedDataSheetsCountSql) + if err != nil { + return unapprovedDataSheetsWithCount, err + } + + countrows, err := countQuery.Query(projectCode, officeCode, officeCode, officeCode) + if err != nil { + return 
unapprovedDataSheetsWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&unapprovedDataSheetsWithCount.TotalCount) + if err != nil { + return unapprovedDataSheetsWithCount, err + } + } + + unapprovedDataSheets := []models.UnapprovedData{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id" + } + selectQueryWithSearch := unapprovedDataSheetsSql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(selectQueryWithSearch) + if err != nil { + return unapprovedDataSheetsWithCount, err + } + + rows, err := dbQuery.Query(projectCode, officeCode, officeCode, officeCode) + if err != nil { + return unapprovedDataSheetsWithCount, err + } + defer rows.Close() + + for rows.Next() { + unapprovedData := models.UnapprovedData{} + err = rows.Scan(&unapprovedData.Ch, &unapprovedData.Fp, &unapprovedData.SegmentDescription, &unapprovedData.Bend, &unapprovedData.MrId, &unapprovedData.Subsample, + &unapprovedData.Recorder, &unapprovedData.Checkby, &unapprovedData.NetRiverMile, &unapprovedData.SiteId, &unapprovedData.ProjectId, &unapprovedData.SegmentId, &unapprovedData.Season, + &unapprovedData.FieldOffice, &unapprovedData.SampleUnitType, &unapprovedData.Gear) + if err != nil { + return unapprovedDataSheetsWithCount, err + } + unapprovedDataSheets = append(unapprovedDataSheets, unapprovedData) + } + + unapprovedDataSheetsWithCount.Items = unapprovedDataSheets + + return unapprovedDataSheetsWithCount, err +} + +var bafiDataSheetsSql = `SELECT p.project_description||' : '||s.segment_description||' : Bend '||ds.bend as psb, +ds.site_id, DS.FIELDOFFICE, f.f_id, mr.mr_id, mr.mr_fid, F.SPECIES, +MR.RECORDER, MR.SUBSAMPLE, MR.GEAR, F.FISHCOUNT, +ds.year, ds.segment_id, ds.bend, ds.bendrn, +COALESCE(mr.bendrivermile, 0) as bendrivermile, f.panelhook +from ds_sites 
ds, ds_moriver mr, ds_fish f, project_lk p, segment_lk s +where DS.SITE_ID = MR.SITE_ID (+) +and MR.MR_ID = F.MR_ID (+) +and DS.PROJECT_ID = P.PROJECT_CODE (+) +and ds.segment_id = s.segment_code (+) +and F.SPECIES = 'BAFI' +and ds.project_id = :1 +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +var bafiDataSheetCountsSql = `SELECT count(*) +from ds_sites ds, ds_moriver mr, ds_fish f, project_lk p, segment_lk s +where DS.SITE_ID = MR.SITE_ID (+) +and MR.MR_ID = F.MR_ID (+) +and DS.PROJECT_ID = P.PROJECT_CODE (+) +and ds.segment_id = s.segment_code (+) +and F.SPECIES = 'BAFI' +and ds.project_id = :1 +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +func (s *PallidSturgeonStore) GetBafiDataSheets(fieldOffice string, projectCode string, queryParams models.SearchParams) (models.BafiDataWithCount, error) { + bafiDataSheetsWithCount := models.BafiDataWithCount{} + countQuery, err := s.db.Prepare(bafiDataSheetCountsSql) + if err != nil { + return bafiDataSheetsWithCount, err + } + + countrows, err := countQuery.Query(projectCode, fieldOffice, fieldOffice, fieldOffice) + if err != nil { + return bafiDataSheetsWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&bafiDataSheetsWithCount.TotalCount) + if err != nil { + return bafiDataSheetsWithCount, err + } + } + + bafiDataSheets := []models.BafiData{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "mr_id" + } + selectQueryWithSearch := bafiDataSheetsSql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(selectQueryWithSearch) + if err != nil { + return bafiDataSheetsWithCount, err + } + + rows, err := dbQuery.Query(projectCode, fieldOffice, fieldOffice, fieldOffice) + if err != nil { + return bafiDataSheetsWithCount, err + } + defer rows.Close() + + for 
rows.Next() { + bafiData := models.BafiData{} + err = rows.Scan(&bafiData.Psb, &bafiData.SiteId, &bafiData.FieldOffice, &bafiData.FId, &bafiData.MrId, &bafiData.MrFid, &bafiData.Species, &bafiData.Recorder, &bafiData.Subsample, &bafiData.Gear, &bafiData.FishCount, + &bafiData.Year, &bafiData.SegmentId, &bafiData.Bend, &bafiData.Bendrn, &bafiData.BendRiverMile, &bafiData.PanelHook) + if err != nil { + return bafiDataSheetsWithCount, err + } + bafiDataSheets = append(bafiDataSheets, bafiData) + } + + bafiDataSheetsWithCount.Items = bafiDataSheets + + return bafiDataSheetsWithCount, err +} + +var uncheckedDataSheetsSql = `select asv.cb, +p.project_description||' : '||s.segment_description||' : Bend '||ds.BEND as psb, +m.MR_ID, +m.SUBSAMPLE, +m.RECORDER, +m.CHECKBY, +COALESCE(m.NETRIVERMILE, 0) as NETRIVERMILE, +m.site_id, +ds.PROJECT_ID, ds.SEGMENT_ID, ds.SEASON, ds.FIELDOFFICE, m.gear +from DS_MORIVER m, project_lk p, segment_lk s, approval_status_v asv, ds_sites ds +where m.site_id = ds.site_id (+) +and ds.SEGMENT_ID = s.segment_code (+) +and DS.PROJECT_ID = P.project_code (+) +and m.mr_id = asv.mr_id (+) +and asv.cb = 'Unchecked' +and ds.PROJECT_ID = :1 +and M.MR_ID NOT IN (SELECT MR_ID +FROM DS_FISH +WHERE SPECIES = 'BAFI') +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +var uncheckedDataSheetsCountSql = `select count(*) +from DS_MORIVER m, project_lk p, segment_lk s, approval_status_v asv, ds_sites ds +where m.site_id = ds.site_id (+) +and ds.segment_id = s.segment_code (+) +and DS.PROJECT_ID = p.project_code (+) +and m.mr_id = asv.mr_id (+) +and asv.cb = 'Unchecked' +and ds.PROJECT_ID = :1 +and M.MR_ID NOT IN (SELECT MR_ID +FROM DS_FISH +WHERE SPECIES = 'BAFI') +and (CASE when :2 != 'ZZ' THEN ds.FIELDOFFICE ELSE :3 END) = :4` + +func (s *PallidSturgeonStore) GetUncheckedDataSheets(fieldOfficeCode string, projectCode string, queryParams models.SearchParams) (models.UncheckedDataWithCount, error) { + uncheckedDataSheetsWithCount := 
models.UncheckedDataWithCount{} + countQuery, err := s.db.Prepare(uncheckedDataSheetsCountSql) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + + countrows, err := countQuery.Query(projectCode, fieldOfficeCode, fieldOfficeCode, fieldOfficeCode) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + defer countrows.Close() + + for countrows.Next() { + err = countrows.Scan(&uncheckedDataSheetsWithCount.TotalCount) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + } + + uncheckedDataSheets := []models.UncheckedData{} + offset := queryParams.PageSize * queryParams.Page + if queryParams.OrderBy == "" { + queryParams.OrderBy = "project_id" + } + selectQueryWithSearch := uncheckedDataSheetsSql + fmt.Sprintf(" order by %s OFFSET %s ROWS FETCH NEXT %s ROWS ONLY", queryParams.OrderBy, strconv.Itoa(offset), strconv.Itoa(queryParams.PageSize)) + dbQuery, err := s.db.Prepare(selectQueryWithSearch) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + + rows, err := dbQuery.Query(projectCode, fieldOfficeCode, fieldOfficeCode, fieldOfficeCode) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + defer rows.Close() + + for rows.Next() { + uncheckedData := models.UncheckedData{} + err = rows.Scan(&uncheckedData.Cb, &uncheckedData.Psb, &uncheckedData.MrID, &uncheckedData.Subsample, &uncheckedData.Recorder, &uncheckedData.Checkby, &uncheckedData.Netrivermile, &uncheckedData.SiteID, + &uncheckedData.ProjectID, &uncheckedData.SegmentID, &uncheckedData.Season, &uncheckedData.FieldOffice, &uncheckedData.Gear) + if err != nil { + return uncheckedDataSheetsWithCount, err + } + uncheckedDataSheets = append(uncheckedDataSheets, uncheckedData) + } + + uncheckedDataSheetsWithCount.Items = uncheckedDataSheets + + return uncheckedDataSheetsWithCount, err +} + +func (s *PallidSturgeonStore) GetDownloadZip() (string, error) { + + rows, err := s.db.Query("SELECT content FROM media_tbl where md_id in (select max(md_id) from 
media_tbl)") + if err != nil { + return "", err + } + defer rows.Close() + + var data []byte + for rows.Next() { + rows.Scan(&data) + } + + downloadInfo, err := s.GetDownloadInfo() + if err != nil { + return "", err + } + + file, err := os.OpenFile( + downloadInfo.Name, + os.O_WRONLY|os.O_TRUNC|os.O_CREATE, + 0666, + ) + if err != nil { + log.Fatal("Cannot write to file", err) + } + defer file.Close() + + bytesWritten, err := file.Write(data) + if err != nil { + log.Fatal(err) + } + log.Printf("Wrote %d bytes.\n", bytesWritten) + return file.Name(), err + +} + +var uploadDownloadInfoSql = `insert into media_tbl (md_id, name, display_name, mime_type, content, last_updated) values ((select max(md_id)+1 from media_tbl),:1,:2,:3,:4,:5) returning md_id into :6` + +func (s *PallidSturgeonStore) UploadDownloadZip(file *multipart.FileHeader) (int, error) { + var id int + fileContent, _ := file.Open() + byteContainer, err := ioutil.ReadAll(fileContent) + if err != nil { + log.Fatal(err) + } + last5 := file.Filename[len(file.Filename)-9:] + words := strings.Split(last5, "_") + numbers := strings.Split(words[2], ".") + version := "Version " + words[0] + "." + words[1] + "." 
+ numbers[0] + lastUpdated := time.Now() + _, err = s.db.Exec(uploadDownloadInfoSql, file.Filename, version, "application/x-zip-compressed", byteContainer, lastUpdated, sql.Out{Dest: &id}) + + return id, err +} + +func (s *PallidSturgeonStore) GetDownloadInfo() (models.DownloadInfo, error) { + downloadInfo := models.DownloadInfo{} + + rows, err := s.db.Query("SELECT name, display_name, last_updated FROM media_tbl where md_id in (select max(md_id) from media_tbl)") + if err != nil { + return downloadInfo, err + } + defer rows.Close() + + for rows.Next() { + rows.Scan(&downloadInfo.Name, &downloadInfo.DisplayName, &downloadInfo.LastUpdated) + } + + return downloadInfo, err +} + +var getUploadSessionLogsSql = `select debug_text, date_created, p_user, upload_session_id from upload_session_log_t where p_user = :1 and upload_session_id = :2` + +func (s *PallidSturgeonStore) GetUploadSessionLogs(user string, uploadSessionId string) ([]models.UploadSessionLog, error) { + rows, err := s.db.Query(getUploadSessionLogsSql, user, uploadSessionId) + + logs := []models.UploadSessionLog{} + if err != nil { + return logs, err + } + defer rows.Close() + + for rows.Next() { + log := models.UploadSessionLog{} + err = rows.Scan(&log.DebugText, &log.DateCreated, &log.PUser, &log.UploadSessionId) + if err != nil { + return nil, err + } + logs = append(logs, log) + } + + return logs, err +} + +var getSitesExportSql = `select site_id, COALESCE(site_fid, 0) as site_fid, year, fieldoffice, field_office_description, project_id, project_description, segment_id, segment_description, season, season_description, bend, bendrn, bend_river_mile, sample_unit_type, sample_unit_desc from table (pallid_data_entry_api.data_entry_site_fnc(:1,:2,:3,:4,:5,:6))` + +func (s *PallidSturgeonStore) GetSitesExport(year string, officeCode string, project string, segment string, season string, bendrn string) ([]models.ExportSite, error) { + rows, err := s.db.Query(getSitesExportSql, year, officeCode, project, 
bendrn, season, segment) + + exportData := []models.ExportSite{} + if err != nil { + return exportData, err + } + defer rows.Close() + + for rows.Next() { + export := models.ExportSite{} + err = rows.Scan(&export.SiteID, &export.SiteFID, &export.SiteYear, &export.FieldOfficeID, &export.FieldOffice, &export.ProjectId, &export.Project, &export.SegmentId, &export.Segment, &export.SeasonId, &export.Season, &export.Bend, &export.Bendrn, &export.BendRiverMile, &export.SampleUnitType, &export.SampleUnitDesc) + if err != nil { + return nil, err + } + exportData = append(exportData, export) + } + + return exportData, err +} diff --git a/stores/store-utilities.go b/stores/store-utilities.go index ec7b8ec..6d8183c 100644 --- a/stores/store-utilities.go +++ b/stores/store-utilities.go @@ -1,64 +1,68 @@ -package stores - -import ( - "database/sql" - "fmt" - - "github.com/jmoiron/sqlx" - - _ "github.com/jackc/pgx/stdlib" - - _ "github.com/godror/godror" - - "di2e.net/cwbi/pallid_sturgeon_api/server/config" -) - -func InitStores(appConfig *config.AppConfig) (*PallidSturgeonStore, error) { - dburl := fmt.Sprintf("user=%s password=%s connectString=%s:%s/%s poolMaxSessions=50 poolSessionTimeout=42s", - appConfig.Dbuser, appConfig.Dbpass, appConfig.Dbhost, appConfig.Dbport, appConfig.Dbname) - db, err := sql.Open("godror", dburl) - if err != nil { - fmt.Println(err) - return nil, err - } - //defer db.Close() - - ss := PallidSturgeonStore{ - db: db, - config: appConfig, - } - - return &ss, nil -} - -type TransactionFunction func(*sqlx.Tx) - -/* -Transaction Wrapper. -DB Calls within the transaction should panic on fail. i.e. use MustExec vs Exec. 
-*/ - -// func transaction(db *sqlx.DB, fn TransactionFunction) error { -// var err error -// tx, err := db.Beginx() -// if err != nil { -// log.Printf("Unable to start transaction: %s\n", err) -// return err -// } -// defer func() { -// if r := recover(); r != nil { -// log.Print(r) -// err = tx.Rollback() -// if err != nil { -// log.Printf("Unable to rollback from transaction: %s", err) -// } -// } else { -// err = tx.Commit() -// if err != nil { -// log.Printf("Unable to commit transaction: %s", err) -// } -// } -// }() -// fn(tx) -// return err -// } +package stores + +import ( + "fmt" + "log" + + "github.com/jmoiron/sqlx" + + _ "github.com/jackc/pgx/v4/stdlib" + + _ "github.com/godror/godror" + + "github.com/USACE/pallid_sturgeon_api/server/config" +) + +func InitStores(appConfig *config.AppConfig) (*PallidSturgeonStore, error) { + connectString := fmt.Sprintf("%s:%s/%s", appConfig.Dbhost, appConfig.Dbport, appConfig.Dbname) + db, err := sqlx.Connect( + "godror", + "user="+appConfig.Dbuser+" password="+appConfig.Dbpass+" connectString="+connectString+" poolMaxSessions=100 poolSessionMaxLifetime=2m0s", + ) + + if err != nil { + log.Printf("[InitStores] m=GetDb,msg=connection has failed: %s", err) + return nil, err + } else { + db.SetMaxIdleConns(0) + } + + ss := PallidSturgeonStore{ + db: db, + config: appConfig, + } + + return &ss, nil +} + +type TransactionFunction func(*sqlx.Tx) + +/* +Transaction Wrapper. +DB Calls within the transaction should panic on fail. i.e. use MustExec vs Exec. 
+*/ + +// func transaction(db *sqlx.DB, fn TransactionFunction) error { +// var err error +// tx, err := db.Beginx() +// if err != nil { +// log.Printf("Unable to start transaction: %s\n", err) +// return err +// } +// defer func() { +// if r := recover(); r != nil { +// log.Print(r) +// err = tx.Rollback() +// if err != nil { +// log.Printf("Unable to rollback from transaction: %s", err) +// } +// } else { +// err = tx.Commit() +// if err != nil { +// log.Printf("Unable to commit transaction: %s", err) +// } +// } +// }() +// fn(tx) +// return err +// }